Part I consists of building a model using neural networks as a regressor to solve an industry-based problem.
Part II consists of building a model using neural networks as a classifier to solve an industry-based problem.
# --- Imports -----------------------------------------------------------------
import numpy as np
import pandas as pd
import os
import csv
import matplotlib.pyplot as plt
%matplotlib inline
import seaborn as sns
from termcolor import colored
import warnings
warnings.filterwarnings('ignore')
from sklearn.model_selection import GridSearchCV
from sklearn.model_selection import RandomizedSearchCV
from keras.wrappers.scikit_learn import KerasClassifier
from scipy.stats import zscore
from sklearn.model_selection import train_test_split
# NOTE(review): the name `preprocessing` imported here from sklearn is
# shadowed below by `tensorflow.keras.layers.experimental.preprocessing`;
# any later use of `preprocessing` resolves to the Keras module.
from sklearn import preprocessing
from sklearn.metrics import accuracy_score, confusion_matrix, precision_score, recall_score, f1_score, precision_recall_curve, auc
from sklearn.preprocessing import StandardScaler
from sklearn.preprocessing import MinMaxScaler
from tensorflow import keras
from tensorflow.keras import layers
from tensorflow.keras import optimizers,regularizers
from tensorflow.keras.layers.experimental import preprocessing
# NOTE(review): this file mixes three Keras namespaces (`keras`,
# `tensorflow.keras`, `tensorflow.python.keras`); objects from different
# namespaces are not always interchangeable — consider unifying on
# `tensorflow.keras`.
from tensorflow.python.keras.models import Sequential
from tensorflow.keras.layers import Flatten, Dense
from tensorflow.python.keras.wrappers.scikit_learn import KerasRegressor
# NOTE(review): Dense is re-imported here (shadows the tensorflow.keras
# Dense above) and KerasRegressor is imported twice from two packages —
# the later import wins in both cases.
from keras.layers import Dense ,Dropout,BatchNormalization, Activation
from keras.wrappers.scikit_learn import KerasRegressor
import tensorflow as tf
import kerastuner as kt
from kerastuner.tuners import RandomSearch
#from deployml.keras import NeuralNetworkBase
print(tf.__version__)
2.4.1
# Load the raw signal dataset (the '\x1B[1m' escape renders the status line bold).
print(colored('\x1B[1mLoading Dataset....','blue'))
df = pd.read_csv('Part- 1,2&3 - Signal.csv')
Loading Dataset....
# Disable pandas' row/column truncation so full frames render in the notebook.
pd.set_option("display.max_rows", None)
pd.set_option("display.max_columns", None)
df.head(10)  # first 10 rows for a quick sanity check
| Parameter 1 | Parameter 2 | Parameter 3 | Parameter 4 | Parameter 5 | Parameter 6 | Parameter 7 | Parameter 8 | Parameter 9 | Parameter 10 | Parameter 11 | Signal_Strength | |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
| 0 | 7.4 | 0.70 | 0.00 | 1.9 | 0.076 | 11.0 | 34.0 | 0.9978 | 3.51 | 0.56 | 9.4 | 5 |
| 1 | 7.8 | 0.88 | 0.00 | 2.6 | 0.098 | 25.0 | 67.0 | 0.9968 | 3.20 | 0.68 | 9.8 | 5 |
| 2 | 7.8 | 0.76 | 0.04 | 2.3 | 0.092 | 15.0 | 54.0 | 0.9970 | 3.26 | 0.65 | 9.8 | 5 |
| 3 | 11.2 | 0.28 | 0.56 | 1.9 | 0.075 | 17.0 | 60.0 | 0.9980 | 3.16 | 0.58 | 9.8 | 6 |
| 4 | 7.4 | 0.70 | 0.00 | 1.9 | 0.076 | 11.0 | 34.0 | 0.9978 | 3.51 | 0.56 | 9.4 | 5 |
| 5 | 7.4 | 0.66 | 0.00 | 1.8 | 0.075 | 13.0 | 40.0 | 0.9978 | 3.51 | 0.56 | 9.4 | 5 |
| 6 | 7.9 | 0.60 | 0.06 | 1.6 | 0.069 | 15.0 | 59.0 | 0.9964 | 3.30 | 0.46 | 9.4 | 5 |
| 7 | 7.3 | 0.65 | 0.00 | 1.2 | 0.065 | 15.0 | 21.0 | 0.9946 | 3.39 | 0.47 | 10.0 | 7 |
| 8 | 7.8 | 0.58 | 0.02 | 2.0 | 0.073 | 9.0 | 18.0 | 0.9968 | 3.36 | 0.57 | 9.5 | 7 |
| 9 | 7.5 | 0.50 | 0.36 | 6.1 | 0.071 | 17.0 | 102.0 | 0.9978 | 3.35 | 0.80 | 10.5 | 5 |
# Report dataset dimensions (rows x columns).
print(colored('\x1B[1mGetting shape of the dataframe','blue'))
print('The data contains \nNo. of rows = ',df.shape[0],'\nNo. of columns = ',df.shape[1])
Getting shape of the dataframe
The data contains
No. of rows = 1599
No. of columns = 12
# Total number of cells (rows * columns).
print(colored('\x1B[1mGetting size of the dataframe','blue'))
df.size
Getting size of the dataframe
19188
# Count distinct values per column.
print(colored('\x1B[1mGetting unique values of column in dataframe','blue'))
print(colored('\x1B[1mColumn Name\tNo. of unique values'))  # NOTE(review): colored() called without a color here
df.nunique(axis=0)
Getting unique values of column in dataframe Column Name No. of unique values
Parameter 1 96 Parameter 2 143 Parameter 3 80 Parameter 4 91 Parameter 5 153 Parameter 6 60 Parameter 7 144 Parameter 8 436 Parameter 9 89 Parameter 10 96 Parameter 11 65 Signal_Strength 6 dtype: int64
# Distinct target labels present in the data.
print(df['Signal_Strength'].unique())
[5 6 7 4 8 3]
# Column dtypes: the eleven predictors are float64, the target is int64.
print(colored('\x1B[1mGetting data-types of columns','blue'))
df.dtypes
Getting data-types of columns
Parameter 1 float64 Parameter 2 float64 Parameter 3 float64 Parameter 4 float64 Parameter 5 float64 Parameter 6 float64 Parameter 7 float64 Parameter 8 float64 Parameter 9 float64 Parameter 10 float64 Parameter 11 float64 Signal_Strength int64 dtype: object
# Scan every column and report whether it contains any negative entries.
print(colored('\x1B[1mChecking negative values','blue'))
for col in df.columns:
    neg_count = (df[col] < 0).sum()
    if neg_count:
        print(col,'has Negative Value of',neg_count)
    else:
        print('No Negative Value in',col)
Checking negative values
No Negative Value in Parameter 1
No Negative Value in Parameter 2
No Negative Value in Parameter 3
No Negative Value in Parameter 4
No Negative Value in Parameter 5
No Negative Value in Parameter 6
No Negative Value in Parameter 7
No Negative Value in Parameter 8
No Negative Value in Parameter 9
No Negative Value in Parameter 10
No Negative Value in Parameter 11
No Negative Value in Signal_Strength
# Per-column count of missing (NaN) cells.
print(colored('\x1B[1mGetting Nan values of column in dataframe','blue'))
print('Column Name\tNo. of Nan values')
df.isnull().sum()
Getting Nan values of column in dataframe
Column Name No. of Nan values
Parameter 1 0 Parameter 2 0 Parameter 3 0 Parameter 4 0 Parameter 5 0 Parameter 6 0 Parameter 7 0 Parameter 8 0 Parameter 9 0 Parameter 10 0 Parameter 11 0 Signal_Strength 0 dtype: int64
# Summary statistics (count/mean/std/quartiles/min/max) for every column.
print(colored('\x1B[1mDescribing statistics of numerical features','blue'))
df.describe()
Describing statistics of numerical features
| Parameter 1 | Parameter 2 | Parameter 3 | Parameter 4 | Parameter 5 | Parameter 6 | Parameter 7 | Parameter 8 | Parameter 9 | Parameter 10 | Parameter 11 | Signal_Strength | |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
| count | 1599.000000 | 1599.000000 | 1599.000000 | 1599.000000 | 1599.000000 | 1599.000000 | 1599.000000 | 1599.000000 | 1599.000000 | 1599.000000 | 1599.000000 | 1599.000000 |
| mean | 8.319637 | 0.527821 | 0.270976 | 2.538806 | 0.087467 | 15.874922 | 46.467792 | 0.996747 | 3.311113 | 0.658149 | 10.422983 | 5.636023 |
| std | 1.741096 | 0.179060 | 0.194801 | 1.409928 | 0.047065 | 10.460157 | 32.895324 | 0.001887 | 0.154386 | 0.169507 | 1.065668 | 0.807569 |
| min | 4.600000 | 0.120000 | 0.000000 | 0.900000 | 0.012000 | 1.000000 | 6.000000 | 0.990070 | 2.740000 | 0.330000 | 8.400000 | 3.000000 |
| 25% | 7.100000 | 0.390000 | 0.090000 | 1.900000 | 0.070000 | 7.000000 | 22.000000 | 0.995600 | 3.210000 | 0.550000 | 9.500000 | 5.000000 |
| 50% | 7.900000 | 0.520000 | 0.260000 | 2.200000 | 0.079000 | 14.000000 | 38.000000 | 0.996750 | 3.310000 | 0.620000 | 10.200000 | 6.000000 |
| 75% | 9.200000 | 0.640000 | 0.420000 | 2.600000 | 0.090000 | 21.000000 | 62.000000 | 0.997835 | 3.400000 | 0.730000 | 11.100000 | 6.000000 |
| max | 15.900000 | 1.580000 | 1.000000 | 15.500000 | 0.611000 | 72.000000 | 289.000000 | 1.003690 | 4.010000 | 2.000000 | 14.900000 | 8.000000 |
# Report each column's skewness and draw its distribution plot.
# Fixes: corrected the typo "represntation" in the status message; removed the
# unused counter `n` and the redundant `series` alias.  The original elif
# thresholds (.25) were dead code — the first branch already captures
# |skew| < .5, so the effective cutoffs are +/-0.5; they are written as such
# here.  A NaN skewness prints nothing, exactly as before.
print(colored('\x1B[1mGraphical representation of columns\n\n','blue'))
for x in df:
    skewness = df[x].skew()
    if -.5 < skewness < .5:
        print('\n',x,"is Symmetrically Skewed as Skewness =",round(skewness,3))
    elif skewness >= .5:
        print('\n',x,"is Positively Skewed towards Right side of asymmetric distribution as Skewness =",round(skewness,3))
    elif skewness <= -.5:
        print('\n',x,"is Negatively Skewed towards Left side of asymmetric distribution as Skewness =",round(skewness,3))
    plt.figure(figsize=(5,5))
    sns.distplot(df[x])  # deprecated in newer seaborn (use histplot); kept for this environment
    plt.xlabel(x)
    plt.show()
Graphical represntation of columns
Parameter 1 is Positively Skewed towards Right side of asymmetric distribution as Skewness = 0.983
Parameter 2 is Positively Skewed towards Right side of asymmetric distribution as Skewness = 0.672
Parameter 3 is Symmetrically Skewed as Skewness = 0.318
Parameter 4 is Positively Skewed towards Right side of asymmetric distribution as Skewness = 4.541
Parameter 5 is Positively Skewed towards Right side of asymmetric distribution as Skewness = 5.68
Parameter 6 is Positively Skewed towards Right side of asymmetric distribution as Skewness = 1.251
Parameter 7 is Positively Skewed towards Right side of asymmetric distribution as Skewness = 1.516
Parameter 8 is Symmetrically Skewed as Skewness = 0.071
Parameter 9 is Symmetrically Skewed as Skewness = 0.194
Parameter 10 is Positively Skewed towards Right side of asymmetric distribution as Skewness = 2.429
Parameter 11 is Positively Skewed towards Right side of asymmetric distribution as Skewness = 0.861
Signal_Strength is Symmetrically Skewed as Skewness = 0.218
# Flag values more than 3 population standard deviations from the column mean
# and draw a box plot per column.  (np.std defaults to ddof=0, i.e. the
# population std, matching the original arithmetic.)
# Fixes: vectorized the per-element z-score loop; removed the unused
# `outliervalue` list and the no-op trailing expression ('\n').
print(colored('\x1B[1mChecking for outlier','blue'))
n = 1
for x in df:
    df[x] = df[x].astype(float)  # ensure float dtype for the z-score arithmetic
    att = df[x]
    z = (att - np.mean(att)) / np.std(att)
    outlier = att[z.abs() > 3.00].tolist()
    print('\nNo. of outlier of',x,'in dataset is',len(outlier),'\nWhich are',outlier)
    # A fresh figure per column; the subplot index only grows, as in the original.
    plt.figure(figsize=(15,20))
    plt.subplot(10,2,n)
    n = n + 1
    sns.boxplot(df[x])
    plt.xlabel(x)
    plt.show()
Checking for outlier
No. of outlier of Parameter 1 in dataset is 12
Which are [15.0, 15.0, 13.8, 14.0, 13.7, 13.7, 15.6, 14.3, 15.5, 15.5, 15.6, 15.9]
No. of outlier of Parameter 2 in dataset is 10 Which are [1.13, 1.07, 1.33, 1.33, 1.09, 1.24, 1.185, 1.115, 1.58, 1.18]
No. of outlier of Parameter 3 in dataset is 1 Which are [1.0]
No. of outlier of Parameter 4 in dataset is 30 Which are [10.7, 7.3, 7.2, 7.0, 11.0, 11.0, 7.9, 7.9, 15.5, 8.3, 7.9, 8.6, 7.5, 9.0, 8.8, 8.8, 8.9, 8.1, 8.1, 8.3, 8.3, 7.8, 12.9, 13.4, 15.4, 15.4, 13.8, 13.8, 13.9, 7.8]
No. of outlier of Parameter 5 in dataset is 31 Which are [0.368, 0.341, 0.332, 0.46399999999999997, 0.401, 0.467, 0.23600000000000002, 0.61, 0.36, 0.27, 0.337, 0.263, 0.611, 0.358, 0.34299999999999997, 0.413, 0.25, 0.42200000000000004, 0.387, 0.415, 0.243, 0.24100000000000002, 0.414, 0.369, 0.40299999999999997, 0.414, 0.415, 0.415, 0.267, 0.235, 0.23]
No. of outlier of Parameter 6 in dataset is 22 Which are [52.0, 51.0, 50.0, 68.0, 68.0, 54.0, 53.0, 52.0, 51.0, 57.0, 50.0, 48.0, 48.0, 72.0, 51.0, 51.0, 52.0, 55.0, 55.0, 48.0, 48.0, 66.0]
No. of outlier of Parameter 7 in dataset is 15 Which are [148.0, 153.0, 165.0, 151.0, 149.0, 147.0, 148.0, 155.0, 151.0, 152.0, 278.0, 289.0, 160.0, 147.0, 147.0]
No. of outlier of Parameter 8 in dataset is 18 Which are [1.0032, 1.0026, 1.00315, 1.00315, 1.00315, 1.0026, 0.99064, 0.99064, 1.00289, 0.9900700000000001, 0.9900700000000001, 0.9902, 0.9908, 0.9908399999999999, 1.00369, 1.00369, 1.00242, 1.00242]
No. of outlier of Parameter 9 in dataset is 8 Which are [3.9, 3.85, 2.74, 3.9, 3.78, 3.78, 4.01, 4.01]
No. of outlier of Parameter 10 in dataset is 27 Which are [1.56, 1.28, 1.2, 1.28, 1.95, 1.22, 1.95, 1.98, 1.31, 2.0, 1.59, 1.61, 1.26, 1.36, 1.18, 1.36, 1.36, 1.17, 1.62, 1.18, 1.34, 1.17, 1.17, 1.33, 1.18, 1.17, 1.17]
No. of outlier of Parameter 11 in dataset is 8 Which are [14.0, 14.0, 14.0, 14.0, 14.9, 14.0, 14.0, 14.0]
No. of outlier of Signal_Strength in dataset is 10 Which are [3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0]
# Class balance of the target: row counts per Signal_Strength level.
print(colored('\x1B[1m COUNT PLOT OF TARGET COLUMN','blue'))
sns.countplot(x='Signal_Strength',data=df)
COUNT PLOT OF TARGET COLUMN
<AxesSubplot:xlabel='Signal_Strength', ylabel='count'>
# Box plot of each predictor, grouped by target class.
# Fixes: removed the unused counter `n`, the unused `att` alias, and the
# astype() call that mutated `vc1` without affecting the plots (they draw
# from `df`).  `vc1` is kept only to enumerate the feature names.
vc1 = df.drop(['Signal_Strength'], axis=1)
plt.figure(figsize=(5,5))  # only the first plot lands on this figure, as before
for i in vc1:
    sns.boxplot(x='Signal_Strength', y=i, data=df, palette="Set1")
    plt.show()
# Dodged histogram of each predictor, split by target class.
# Fixes: removed the unused counter `n`, the unused `att` alias, the no-op
# astype() on `vc1`, and two commented-out experiments.  The empty (5,5)
# figure is kept: sns.displot creates its own figure, so the original also
# left this one blank.
vc1 = df.drop(['Signal_Strength'], axis=1)
plt.figure(figsize=(5,5))
for i in vc1:
    sns.displot(hue='Signal_Strength', x=i, data=df, multiple="dodge")
    plt.show()
<Figure size 360x360 with 0 Axes>
# KDE joint plot of each predictor against the target.
# Fixes: removed the unused counter `n`, the unused `att` alias, and the
# no-op astype() on `vc1`.  The empty (5,5) figure is kept: sns.jointplot
# creates its own figure, so the original also left this one blank.
vc1 = df.drop(['Signal_Strength'], axis=1)
plt.figure(figsize=(5,5))
for i in vc1:
    sns.jointplot(data=df, x="Signal_Strength", y=i, kind="kde")
    plt.show()
<Figure size 360x360 with 0 Axes>
# Pairwise scatter/histogram matrix across all 12 columns (slow for large frames).
sns.pairplot(df)
<seaborn.axisgrid.PairGrid at 0x13fc51040>
# Replace every value more than 3 population standard deviations from the
# column mean with that column's median (median computed before replacement,
# mean/std from the original values — same as the original logic).
# Fixes: the original mutated via chained `df[x].replace(..., inplace=True)`,
# which can silently fail to propagate to `df` in some pandas versions; a
# boolean-mask `df.loc` assignment is the robust equivalent.  Also removed
# the unused `n`, `outlier`, and `outliervalue` variables and the
# per-element Python loop.
print(colored('\x1B[1m Replacing outliers by Median Value','blue'))
for x in df:
    att = df[x]
    median = att.median()
    z = (att - np.mean(att)) / np.std(att)
    df.loc[z.abs() > 3.00, x] = median
Replacing outliers by Median Value
# How many fully duplicated rows does the frame contain?
print(colored('\x1B[1m Checking duplicate rows in dataframe','blue'))
dup_mask = df.duplicated()
duplicate = df.loc[dup_mask]
print('Duplicate Rows in dataframe are')
duplicate.shape
Checking duplicate rows in dataframe
Duplicate Rows in dataframe are
(242, 12)
# Remove exact duplicate rows so repeated samples don't bias the model.
# Fix: corrected typos in the status message ("Droping" -> "Dropping",
# "orignal" -> "original").
print(colored('\x1B[1m Dropping duplicate rows from original dataframe','blue'))
df = df.drop_duplicates()
Droping duplicate rows from orignal dataframe
# Pairwise correlation matrix between all columns (pandas default: Pearson).
print(colored('\x1B[1m Checking correlation','blue'))
co = df.corr()
co
Checking correlation
| Parameter 1 | Parameter 2 | Parameter 3 | Parameter 4 | Parameter 5 | Parameter 6 | Parameter 7 | Parameter 8 | Parameter 9 | Parameter 10 | Parameter 11 | Signal_Strength | |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
| Parameter 1 | 1.000000 | -0.265369 | 0.656997 | 0.176676 | 0.193620 | -0.133272 | -0.102142 | 0.646174 | -0.653975 | 0.201699 | -0.057846 | 0.123509 |
| Parameter 2 | -0.265369 | 1.000000 | -0.558679 | 0.063188 | 0.106243 | 0.002591 | 0.095534 | 0.016183 | 0.235277 | -0.312001 | -0.205257 | -0.352318 |
| Parameter 3 | 0.656997 | -0.558679 | 1.000000 | 0.154426 | 0.088543 | -0.045450 | 0.014787 | 0.358222 | -0.545433 | 0.324154 | 0.114771 | 0.228302 |
| Parameter 4 | 0.176676 | 0.063188 | 0.154426 | 1.000000 | 0.144377 | -0.013266 | 0.085693 | 0.332732 | -0.060779 | 0.061869 | 0.131250 | 0.039681 |
| Parameter 5 | 0.193620 | 0.106243 | 0.088543 | 0.144377 | 1.000000 | -0.066003 | 0.065543 | 0.320911 | -0.180487 | 0.047617 | -0.242817 | -0.134126 |
| Parameter 6 | -0.133272 | 0.002591 | -0.045450 | -0.013266 | -0.066003 | 1.000000 | 0.631916 | -0.019635 | 0.065255 | 0.032775 | -0.088053 | -0.069633 |
| Parameter 7 | -0.102142 | 0.095534 | 0.014787 | 0.085693 | 0.065543 | 0.631916 | 1.000000 | 0.120894 | -0.057483 | -0.041735 | -0.265658 | -0.230121 |
| Parameter 8 | 0.646174 | 0.016183 | 0.358222 | 0.332732 | 0.320911 | -0.019635 | 0.120894 | 1.000000 | -0.318802 | 0.138261 | -0.483745 | -0.169961 |
| Parameter 9 | -0.653975 | 0.235277 | -0.545433 | -0.060779 | -0.180487 | 0.065255 | -0.057483 | -0.318802 | 1.000000 | -0.087251 | 0.164439 | -0.053180 |
| Parameter 10 | 0.201699 | -0.312001 | 0.324154 | 0.061869 | 0.047617 | 0.032775 | -0.041735 | 0.138261 | -0.087251 | 1.000000 | 0.182529 | 0.357023 |
| Parameter 11 | -0.057846 | -0.205257 | 0.114771 | 0.131250 | -0.242817 | -0.088053 | -0.265658 | -0.483745 | 0.164439 | 0.182529 | 1.000000 | 0.480425 |
| Signal_Strength | 0.123509 | -0.352318 | 0.228302 | 0.039681 | -0.134126 | -0.069633 | -0.230121 | -0.169961 | -0.053180 | 0.357023 | 0.480425 | 1.000000 |
# Annotated heatmap of the correlation matrix computed above, on a fixed
# [-1, 1] color scale so hues are comparable across cells.
print(colored('\x1B[1m Checking correlation via heatmap','blue'))
plt.figure(figsize=(15, 15))
heatmap_opts = dict(annot=True, cmap='coolwarm', vmax=1.0, vmin=-1.0)
sns.heatmap(co, **heatmap_opts)
plt.title('Correlation between features')
plt.show()
Checking correlation via heatmap
# Deep-copy the cleaned frame, then separate the target (y) from the
# eleven predictors (x).
dfc = df.copy(deep=True)
y = dfc['Signal_Strength']
x = dfc.drop(columns=['Signal_Strength'])
# Hold out 20% of the rows for testing; fixed seed for reproducibility.
# Fix: corrected the typo "Spliting" -> "Splitting" in the status message.
print(colored('\x1B[1m Splitting the data into train and test sets','blue'))
X_train, X_test, Y_train, Y_test = train_test_split(x, y, test_size=0.2, random_state=202)
Spliting the data into train and test sets
# Standardize features using statistics learned from the TRAINING split only.
# Fix: the original fit a *second* StandardScaler on X_test, which leaks
# test-set statistics and standardizes the two splits with different
# means/variances.  The test split must be transformed with the scaler
# fitted on the training split.
print(colored('\x1B[1m Scaling the data','blue'))
scaler = StandardScaler().fit(X_train)
X_train = scaler.transform(X_train)
X_test = scaler.transform(X_test)
Scaling the data
# Feed-forward regression network: 11 inputs -> 24 -> 40 -> 60 -> 45 -> 15 -> 1.
# ReLU hidden activations, 2% dropout after two of the hidden layers, and a
# single linear output unit for the continuous prediction.
print(colored('\x1B[1m Building regressor Model','blue'))
model = Sequential()
regressor_layers = [
    Dense(units=24, activation='relu', input_dim=11, kernel_initializer='uniform'),
    Dense(40, activation='relu'),
    Dense(60, activation='relu'),
    Dropout(0.02),
    Dense(45, activation='relu'),
    Dense(15, activation='relu'),
    Dropout(0.02),
    Dense(1, activation='linear'),
]
for layer in regressor_layers:
    model.add(layer)
Building regressor Model
# Compile with SGD.
# Fix: a *list* of losses (['mse','mae']) is for multi-output models, but this
# network has a single output; the intent — optimize MSE while tracking MAE —
# is expressed with loss='mse' and MAE as a metric.
print(colored('\x1B[1m Compiling Model','blue'))
model.compile(optimizer='SGD', loss='mse', metrics=['mae'])
Compiling Model
# Train for 200 epochs with batch size 32, holding out 30% of the training
# rows for per-epoch validation; keep the History object for later inspection.
print(colored('\x1B[1m Fitting the Model','blue'))
fit_opts = dict(batch_size=32, epochs=200, validation_split=0.3)
history = model.fit(X_train, Y_train, **fit_opts)
Fitting the Model
Epoch 1/200
24/24 [==============================] - 1s 30ms/step - loss: 17.8886 - val_loss: 0.5232
Epoch 2/200
24/24 [==============================] - 0s 3ms/step - loss: 0.8112 - val_loss: 0.4016
Epoch 3/200
24/24 [==============================] - 0s 4ms/step - loss: 0.6532 - val_loss: 0.4020
Epoch 4/200
24/24 [==============================] - 0s 3ms/step - loss: 0.5896 - val_loss: 0.4088
Epoch 5/200
24/24 [==============================] - 0s 4ms/step - loss: 0.6149 - val_loss: 0.4255
Epoch 6/200
24/24 [==============================] - 0s 4ms/step - loss: 0.6260 - val_loss: 0.4013
Epoch 7/200
24/24 [==============================] - 0s 3ms/step - loss: 0.6334 - val_loss: 0.3824
Epoch 8/200
24/24 [==============================] - 0s 3ms/step - loss: 0.6537 - val_loss: 0.3773
Epoch 9/200
24/24 [==============================] - 0s 3ms/step - loss: 0.5284 - val_loss: 0.3948
Epoch 10/200
24/24 [==============================] - 0s 3ms/step - loss: 0.6505 - val_loss: 0.3771
Epoch 11/200
24/24 [==============================] - 0s 4ms/step - loss: 0.5797 - val_loss: 0.3818
Epoch 12/200
24/24 [==============================] - 0s 3ms/step - loss: 0.5699 - val_loss: 0.3946
Epoch 13/200
24/24 [==============================] - 0s 4ms/step - loss: 0.4889 - val_loss: 0.3799
Epoch 14/200
24/24 [==============================] - 0s 3ms/step - loss: 0.5135 - val_loss: 0.3791
Epoch 15/200
24/24 [==============================] - 0s 3ms/step - loss: 0.4836 - val_loss: 0.3945
Epoch 16/200
24/24 [==============================] - 0s 3ms/step - loss: 0.4804 - val_loss: 0.3756
Epoch 17/200
24/24 [==============================] - 0s 4ms/step - loss: 0.5410 - val_loss: 0.3739
Epoch 18/200
24/24 [==============================] - 0s 3ms/step - loss: 0.5541 - val_loss: 0.3940
Epoch 19/200
24/24 [==============================] - 0s 4ms/step - loss: 0.5648 - val_loss: 0.4753
Epoch 20/200
24/24 [==============================] - 0s 3ms/step - loss: 0.4703 - val_loss: 0.3836
Epoch 21/200
24/24 [==============================] - 0s 4ms/step - loss: 0.4701 - val_loss: 0.3740
Epoch 22/200
24/24 [==============================] - 0s 3ms/step - loss: 0.5014 - val_loss: 0.3749
Epoch 23/200
24/24 [==============================] - 0s 4ms/step - loss: 0.4507 - val_loss: 0.4019
Epoch 24/200
24/24 [==============================] - 0s 3ms/step - loss: 0.4679 - val_loss: 0.3814
Epoch 25/200
24/24 [==============================] - 0s 3ms/step - loss: 0.4938 - val_loss: 0.3732
Epoch 26/200
24/24 [==============================] - 0s 3ms/step - loss: 0.5255 - val_loss: 0.3956
Epoch 27/200
24/24 [==============================] - 0s 4ms/step - loss: 0.4716 - val_loss: 0.3936
Epoch 28/200
24/24 [==============================] - 0s 3ms/step - loss: 0.4567 - val_loss: 0.3786
Epoch 29/200
24/24 [==============================] - 0s 3ms/step - loss: 0.4906 - val_loss: 0.3789
Epoch 30/200
24/24 [==============================] - 0s 4ms/step - loss: 0.4941 - val_loss: 0.3769
Epoch 31/200
24/24 [==============================] - 0s 3ms/step - loss: 0.4631 - val_loss: 0.3934
Epoch 32/200
24/24 [==============================] - 0s 3ms/step - loss: 0.4943 - val_loss: 0.4207
Epoch 33/200
24/24 [==============================] - 0s 3ms/step - loss: 0.5073 - val_loss: 0.4128
Epoch 34/200
24/24 [==============================] - 0s 3ms/step - loss: 0.4916 - val_loss: 0.3812
Epoch 35/200
24/24 [==============================] - 0s 4ms/step - loss: 0.5034 - val_loss: 0.3780
Epoch 36/200
24/24 [==============================] - 0s 3ms/step - loss: 0.4560 - val_loss: 0.3784
Epoch 37/200
24/24 [==============================] - 0s 3ms/step - loss: 0.4756 - val_loss: 0.3832
Epoch 38/200
24/24 [==============================] - 0s 3ms/step - loss: 0.4657 - val_loss: 0.4395
Epoch 39/200
24/24 [==============================] - 0s 3ms/step - loss: 0.4913 - val_loss: 0.3808
Epoch 40/200
24/24 [==============================] - 0s 3ms/step - loss: 0.4092 - val_loss: 0.3820
Epoch 41/200
24/24 [==============================] - 0s 3ms/step - loss: 0.4320 - val_loss: 0.3842
Epoch 42/200
24/24 [==============================] - 0s 3ms/step - loss: 0.4417 - val_loss: 0.3849
Epoch 43/200
24/24 [==============================] - 0s 3ms/step - loss: 0.4555 - val_loss: 0.4082
Epoch 44/200
24/24 [==============================] - 0s 3ms/step - loss: 0.4663 - val_loss: 0.3841
Epoch 45/200
24/24 [==============================] - 0s 3ms/step - loss: 0.4405 - val_loss: 0.4089
Epoch 46/200
24/24 [==============================] - 0s 3ms/step - loss: 0.3688 - val_loss: 0.4017
Epoch 47/200
24/24 [==============================] - 0s 3ms/step - loss: 0.4308 - val_loss: 0.3818
Epoch 48/200
24/24 [==============================] - 0s 3ms/step - loss: 0.5190 - val_loss: 0.3762
Epoch 49/200
24/24 [==============================] - 0s 3ms/step - loss: 0.3742 - val_loss: 0.4029
Epoch 50/200
24/24 [==============================] - 0s 3ms/step - loss: 0.4612 - val_loss: 0.3827
Epoch 51/200
24/24 [==============================] - 0s 3ms/step - loss: 0.4030 - val_loss: 0.3846
Epoch 52/200
24/24 [==============================] - 0s 3ms/step - loss: 0.4471 - val_loss: 0.3798
Epoch 53/200
24/24 [==============================] - 0s 3ms/step - loss: 0.3788 - val_loss: 0.4001
Epoch 54/200
24/24 [==============================] - 0s 4ms/step - loss: 0.5024 - val_loss: 0.3966
Epoch 55/200
24/24 [==============================] - 0s 4ms/step - loss: 0.4263 - val_loss: 0.3917
Epoch 56/200
24/24 [==============================] - 0s 4ms/step - loss: 0.4571 - val_loss: 0.3914
Epoch 57/200
24/24 [==============================] - 0s 4ms/step - loss: 0.4025 - val_loss: 0.4212
Epoch 58/200
24/24 [==============================] - 0s 4ms/step - loss: 0.4364 - val_loss: 0.4209
Epoch 59/200
24/24 [==============================] - 0s 4ms/step - loss: 0.4564 - val_loss: 0.3961
Epoch 60/200
24/24 [==============================] - 0s 4ms/step - loss: 0.4288 - val_loss: 0.3909
Epoch 61/200
24/24 [==============================] - 0s 4ms/step - loss: 0.4756 - val_loss: 0.4471
Epoch 62/200
24/24 [==============================] - 0s 4ms/step - loss: 0.3776 - val_loss: 0.4354
Epoch 63/200
24/24 [==============================] - 0s 4ms/step - loss: 0.4043 - val_loss: 0.3834
Epoch 64/200
24/24 [==============================] - 0s 4ms/step - loss: 0.3751 - val_loss: 0.4536
Epoch 65/200
24/24 [==============================] - 0s 4ms/step - loss: 0.4153 - val_loss: 0.4392
Epoch 66/200
24/24 [==============================] - 0s 4ms/step - loss: 0.4302 - val_loss: 0.4067
Epoch 67/200
24/24 [==============================] - 0s 4ms/step - loss: 0.4399 - val_loss: 0.3899
Epoch 68/200
24/24 [==============================] - 0s 4ms/step - loss: 0.4045 - val_loss: 0.4275
Epoch 69/200
24/24 [==============================] - 0s 4ms/step - loss: 0.4243 - val_loss: 0.4343
Epoch 70/200
24/24 [==============================] - 0s 4ms/step - loss: 0.4538 - val_loss: 0.4799
Epoch 71/200
24/24 [==============================] - 0s 4ms/step - loss: 0.3741 - val_loss: 0.3890
Epoch 72/200
24/24 [==============================] - 0s 4ms/step - loss: 0.3997 - val_loss: 0.3966
Epoch 73/200
24/24 [==============================] - 0s 4ms/step - loss: 0.3944 - val_loss: 0.4097
Epoch 74/200
24/24 [==============================] - 0s 4ms/step - loss: 0.3967 - val_loss: 0.3883
Epoch 75/200
24/24 [==============================] - 0s 4ms/step - loss: 0.4085 - val_loss: 0.3970
Epoch 76/200
24/24 [==============================] - 0s 4ms/step - loss: 0.3586 - val_loss: 0.4457
Epoch 77/200
24/24 [==============================] - 0s 4ms/step - loss: 0.3890 - val_loss: 0.4012
Epoch 78/200
24/24 [==============================] - 0s 4ms/step - loss: 0.3712 - val_loss: 0.4199
Epoch 79/200
24/24 [==============================] - 0s 4ms/step - loss: 0.3815 - val_loss: 0.4187
Epoch 80/200
24/24 [==============================] - 0s 4ms/step - loss: 0.3995 - val_loss: 0.4004
Epoch 81/200
24/24 [==============================] - 0s 4ms/step - loss: 0.3559 - val_loss: 0.4001
Epoch 82/200
24/24 [==============================] - 0s 4ms/step - loss: 0.3877 - val_loss: 0.3970
Epoch 83/200
24/24 [==============================] - 0s 4ms/step - loss: 0.3929 - val_loss: 0.3990
Epoch 84/200
24/24 [==============================] - 0s 4ms/step - loss: 0.3832 - val_loss: 0.4089
Epoch 85/200
24/24 [==============================] - 0s 4ms/step - loss: 0.3406 - val_loss: 0.4574
Epoch 86/200
24/24 [==============================] - 0s 4ms/step - loss: 0.4198 - val_loss: 0.4037
Epoch 87/200
24/24 [==============================] - 0s 4ms/step - loss: 0.3680 - val_loss: 0.4326
Epoch 88/200
24/24 [==============================] - 0s 4ms/step - loss: 0.4455 - val_loss: 0.4018
Epoch 89/200
24/24 [==============================] - 0s 4ms/step - loss: 0.3621 - val_loss: 0.4530
Epoch 90/200
24/24 [==============================] - 0s 4ms/step - loss: 0.3659 - val_loss: 0.4119
Epoch 91/200
24/24 [==============================] - 0s 5ms/step - loss: 0.3857 - val_loss: 0.4050
Epoch 92/200
24/24 [==============================] - 0s 3ms/step - loss: 0.4252 - val_loss: 0.4023
Epoch 93/200
24/24 [==============================] - 0s 3ms/step - loss: 0.3882 - val_loss: 0.4014
Epoch 94/200
24/24 [==============================] - 0s 3ms/step - loss: 0.3847 - val_loss: 0.3965
Epoch 95/200
24/24 [==============================] - 0s 7ms/step - loss: 0.3861 - val_loss: 0.4032
Epoch 96/200
24/24 [==============================] - 0s 3ms/step - loss: 0.3668 - val_loss: 0.4113
Epoch 97/200
24/24 [==============================] - 0s 3ms/step - loss: 0.3930 - val_loss: 0.4017
Epoch 98/200
24/24 [==============================] - 0s 3ms/step - loss: 0.3548 - val_loss: 0.4197
Epoch 99/200
24/24 [==============================] - 0s 3ms/step - loss: 0.4306 - val_loss: 0.3989
Epoch 100/200
24/24 [==============================] - 0s 3ms/step - loss: 0.3524 - val_loss: 0.4517
Epoch 101/200
24/24 [==============================] - 0s 3ms/step - loss: 0.3761 - val_loss: 0.4169
Epoch 102/200
24/24 [==============================] - 0s 3ms/step - loss: 0.3629 - val_loss: 0.4300
Epoch 103/200
24/24 [==============================] - 0s 3ms/step - loss: 0.3385 - val_loss: 0.4003
Epoch 104/200
24/24 [==============================] - 0s 3ms/step - loss: 0.4005 - val_loss: 0.4257
Epoch 105/200
24/24 [==============================] - 0s 4ms/step - loss: 0.3249 - val_loss: 0.4045
Epoch 106/200
24/24 [==============================] - 0s 3ms/step - loss: 0.3833 - val_loss: 0.4127
Epoch 107/200
24/24 [==============================] - 0s 3ms/step - loss: 0.3373 - val_loss: 0.4091
Epoch 108/200
24/24 [==============================] - 0s 3ms/step - loss: 0.3409 - val_loss: 0.4015
Epoch 109/200
24/24 [==============================] - 0s 3ms/step - loss: 0.3491 - val_loss: 0.4004
Epoch 110/200
24/24 [==============================] - 0s 3ms/step - loss: 0.3180 - val_loss: 0.4517
Epoch 111/200
24/24 [==============================] - 0s 3ms/step - loss: 0.4205 - val_loss: 0.4381
Epoch 112/200
24/24 [==============================] - 0s 3ms/step - loss: 0.3588 - val_loss: 0.4177
Epoch 113/200
24/24 [==============================] - 0s 3ms/step - loss: 0.3171 - val_loss: 0.4307
Epoch 114/200
24/24 [==============================] - 0s 3ms/step - loss: 0.3596 - val_loss: 0.3972
Epoch 115/200
24/24 [==============================] - 0s 3ms/step - loss: 0.3902 - val_loss: 0.4076
Epoch 116/200
24/24 [==============================] - 0s 3ms/step - loss: 0.3157 - val_loss: 0.4119
Epoch 117/200
24/24 [==============================] - 0s 3ms/step - loss: 0.3115 - val_loss: 0.4963
Epoch 118/200
24/24 [==============================] - 0s 3ms/step - loss: 0.3600 - val_loss: 0.4153
Epoch 119/200
24/24 [==============================] - 0s 3ms/step - loss: 0.3531 - val_loss: 0.4110
Epoch 120/200
24/24 [==============================] - 0s 3ms/step - loss: 0.3881 - val_loss: 0.4015
Epoch 121/200
24/24 [==============================] - 0s 3ms/step - loss: 0.3844 - val_loss: 0.4355
Epoch 122/200
24/24 [==============================] - 0s 3ms/step - loss: 0.3108 - val_loss: 0.4159
Epoch 123/200
24/24 [==============================] - 0s 3ms/step - loss: 0.3353 - val_loss: 0.4983
Epoch 124/200
24/24 [==============================] - 0s 3ms/step - loss: 0.3561 - val_loss: 0.4118
Epoch 125/200
24/24 [==============================] - 0s 3ms/step - loss: 0.3410 - val_loss: 0.4168
Epoch 126/200
24/24 [==============================] - 0s 3ms/step - loss: 0.3609 - val_loss: 0.4138
Epoch 127/200
24/24 [==============================] - 0s 3ms/step - loss: 0.3283 - val_loss: 0.4158
Epoch 128/200
24/24 [==============================] - 0s 3ms/step - loss: 0.3265 - val_loss: 0.5170
Epoch 129/200
24/24 [==============================] - 0s 3ms/step - loss: 0.3535 - val_loss: 0.4064
Epoch 130/200
24/24 [==============================] - 0s 3ms/step - loss: 0.3349 - val_loss: 0.4189
Epoch 131/200
24/24 [==============================] - 0s 3ms/step - loss: 0.3483 - val_loss: 0.4512
Epoch 132/200
24/24 [==============================] - 0s 3ms/step - loss: 0.3657 - val_loss: 0.4124
Epoch 133/200
24/24 [==============================] - 0s 3ms/step - loss: 0.3330 - val_loss: 0.4666
Epoch 134/200
24/24 [==============================] - 0s 3ms/step - loss: 0.3150 - val_loss: 0.4092
Epoch 135/200
24/24 [==============================] - 0s 3ms/step - loss: 0.3331 - val_loss: 0.4082
Epoch 136/200
24/24 [==============================] - 0s 3ms/step - loss: 0.3304 - val_loss: 0.4092
Epoch 137/200
24/24 [==============================] - 0s 3ms/step - loss: 0.2971 - val_loss: 0.4221
Epoch 138/200
24/24 [==============================] - 0s 3ms/step - loss: 0.3264 - val_loss: 0.4217
Epoch 139/200
24/24 [==============================] - 0s 3ms/step - loss: 0.3031 - val_loss: 0.4194
Epoch 140/200
24/24 [==============================] - 0s 3ms/step - loss: 0.3339 - val_loss: 0.4175
Epoch 141/200
24/24 [==============================] - 0s 3ms/step - loss: 0.2949 - val_loss: 0.4456
Epoch 142/200
24/24 [==============================] - 0s 3ms/step - loss: 0.3490 - val_loss: 0.4267
Epoch 143/200
24/24 [==============================] - 0s 3ms/step - loss: 0.3567 - val_loss: 0.4187
Epoch 144/200
24/24 [==============================] - 0s 3ms/step - loss: 0.3269 - val_loss: 0.4149
Epoch 145/200
24/24 [==============================] - 0s 3ms/step - loss: 0.3337 - val_loss: 0.4180
Epoch 146/200
24/24 [==============================] - 0s 3ms/step - loss: 0.3509 - val_loss: 0.4941
Epoch 147/200
24/24 [==============================] - 0s 3ms/step - loss: 0.3313 - val_loss: 0.4505
Epoch 148/200
24/24 [==============================] - 0s 3ms/step - loss: 0.3146 - val_loss: 0.4422
Epoch 149/200
24/24 [==============================] - 0s 3ms/step - loss: 0.3124 - val_loss: 0.4224
Epoch 150/200
24/24 [==============================] - 0s 3ms/step - loss: 0.3492 - val_loss: 0.4171
Epoch 151/200
24/24 [==============================] - 0s 4ms/step - loss: 0.3422 - val_loss: 0.4272
Epoch 152/200
24/24 [==============================] - 0s 3ms/step - loss: 0.2972 - val_loss: 0.4186
Epoch 153/200
24/24 [==============================] - 0s 3ms/step - loss: 0.3258 - val_loss: 0.4209
Epoch 154/200
24/24 [==============================] - 0s 4ms/step - loss: 0.2743 - val_loss: 0.5089
Epoch 155/200
24/24 [==============================] - 0s 3ms/step - loss: 0.3257 - val_loss: 0.4378
Epoch 156/200
24/24 [==============================] - 0s 3ms/step - loss: 0.2995 - val_loss: 0.4147
Epoch 157/200
24/24 [==============================] - 0s 3ms/step - loss: 0.2808 - val_loss: 0.4691
Epoch 158/200
24/24 [==============================] - 0s 3ms/step - loss: 0.3168 - val_loss: 0.4339
Epoch 159/200
24/24 [==============================] - 0s 3ms/step - loss: 0.3050 - val_loss: 0.4556
Epoch 160/200
24/24 [==============================] - 0s 3ms/step - loss: 0.2966 - val_loss: 0.5443
Epoch 161/200
24/24 [==============================] - 0s 3ms/step - loss: 0.3118 - val_loss: 0.4256
Epoch 162/200
24/24 [==============================] - 0s 3ms/step - loss: 0.2674 - val_loss: 0.4451
Epoch 163/200
24/24 [==============================] - 0s 3ms/step - loss: 0.2901 - val_loss: 0.4269
Epoch 164/200
24/24 [==============================] - 0s 3ms/step - loss: 0.2850 - val_loss: 0.4476
Epoch 165/200
24/24 [==============================] - 0s 3ms/step - loss: 0.3107 - val_loss: 0.4411
Epoch 166/200
24/24 [==============================] - 0s 3ms/step - loss: 0.2914 - val_loss: 0.4257
Epoch 167/200
24/24 [==============================] - 0s 3ms/step - loss: 0.3154 - val_loss: 0.4552
Epoch 168/200
24/24 [==============================] - 0s 3ms/step - loss: 0.3062 - val_loss: 0.4463
Epoch 169/200
24/24 [==============================] - 0s 3ms/step - loss: 0.3076 - val_loss: 0.4392
Epoch 170/200
24/24 [==============================] - 0s 3ms/step - loss: 0.2818 - val_loss: 0.4344
Epoch 171/200
24/24 [==============================] - 0s 3ms/step - loss: 0.2763 - val_loss: 0.4686
Epoch 172/200
24/24 [==============================] - 0s 3ms/step - loss: 0.3133 - val_loss: 0.4321
Epoch 173/200
24/24 [==============================] - 0s 3ms/step - loss: 0.3176 - val_loss: 0.4249
Epoch 174/200
24/24 [==============================] - 0s 3ms/step - loss: 0.3119 - val_loss: 0.4349
Epoch 175/200
24/24 [==============================] - 0s 3ms/step - loss: 0.2502 - val_loss: 0.4354
Epoch 176/200
24/24 [==============================] - 0s 3ms/step - loss: 0.3034 - val_loss: 0.4232
Epoch 177/200
24/24 [==============================] - 0s 3ms/step - loss: 0.2828 - val_loss: 0.4468
Epoch 178/200
24/24 [==============================] - 0s 3ms/step - loss: 0.2840 - val_loss: 0.4345
Epoch 179/200
24/24 [==============================] - 0s 3ms/step - loss: 0.2541 - val_loss: 0.4455
Epoch 180/200
24/24 [==============================] - 0s 3ms/step - loss: 0.2811 - val_loss: 0.4570
Epoch 181/200
24/24 [==============================] - 0s 3ms/step - loss: 0.2850 - val_loss: 0.4400
Epoch 182/200
24/24 [==============================] - 0s 3ms/step - loss: 0.2551 - val_loss: 0.4377
Epoch 183/200
24/24 [==============================] - 0s 3ms/step - loss: 0.3100 - val_loss: 0.4601
Epoch 184/200
24/24 [==============================] - 0s 3ms/step - loss: 0.2929 - val_loss: 0.4556
Epoch 185/200
24/24 [==============================] - 0s 3ms/step - loss: 0.2668 - val_loss: 0.4412
Epoch 186/200
24/24 [==============================] - 0s 3ms/step - loss: 0.2839 - val_loss: 0.4358
Epoch 187/200
24/24 [==============================] - 0s 3ms/step - loss: 0.2856 - val_loss: 0.4663
Epoch 188/200
24/24 [==============================] - 0s 3ms/step - loss: 0.2945 - val_loss: 0.4541
Epoch 189/200
24/24 [==============================] - 0s 3ms/step - loss: 0.2755 - val_loss: 0.4702
Epoch 190/200
24/24 [==============================] - 0s 3ms/step - loss: 0.3179 - val_loss: 0.4420
Epoch 191/200
24/24 [==============================] - 0s 3ms/step - loss: 0.2768 - val_loss: 0.4872
Epoch 192/200
24/24 [==============================] - 0s 4ms/step - loss: 0.3084 - val_loss: 0.4422
Epoch 193/200
24/24 [==============================] - 0s 3ms/step - loss: 0.2660 - val_loss: 0.4374
Epoch 194/200
24/24 [==============================] - 0s 3ms/step - loss: 0.2488 - val_loss: 0.4356
Epoch 195/200
24/24 [==============================] - 0s 3ms/step - loss: 0.2996 - val_loss: 0.4643
Epoch 196/200
24/24 [==============================] - 0s 3ms/step - loss: 0.2639 - val_loss: 0.4446
Epoch 197/200
24/24 [==============================] - 0s 3ms/step - loss: 0.2907 - val_loss: 0.4443
Epoch 198/200
24/24 [==============================] - 0s 3ms/step - loss: 0.2824 - val_loss: 0.5165
Epoch 199/200
24/24 [==============================] - 0s 3ms/step - loss: 0.2584 - val_loss: 0.4364
Epoch 200/200
24/24 [==============================] - 0s 3ms/step - loss: 0.3051 - val_loss: 0.4475
# Report the fitted model's loss on the training split (silent evaluate).
print(colored('\x1B[1m Evaluating model with train dataset','blue'))
train_loss = model.evaluate(X_train, Y_train, verbose=0)
print(colored('\x1B[1mTrain Loss:','red'), train_loss)
Evaluating model with train dataset Train Loss: 0.2788015604019165
# Report the fitted model's loss on the held-out test split.
print(colored('\x1B[1m Evaluating model with test dataset','blue'))
test_loss = model.evaluate(X_test, Y_test, verbose=0)
print(colored('\x1B[1mTest Loss:','red'), test_loss)
Evaluating model with test dataset Test Loss: 0.42188888788223267
Train Loss: 0.278
Test Loss: 0.421
# Show which metrics were recorded, then chart train vs. validation loss.
print(history.history.keys())
# "Loss"
hist = history.history
plt.plot(hist['loss'])
plt.plot(hist['val_loss'])
plt.title('model loss')
plt.ylabel('loss')
plt.xlabel('epoch')
plt.legend(['train', 'validation'], loc='upper left')
plt.show()
dict_keys(['loss', 'val_loss'])
# Overlay model predictions on the true test targets for a visual check.
y_pred = model.predict(X_test)
plt.plot(Y_test, color='red', label='Real data')
plt.plot(y_pred, color='blue', label='Predicted data')
plt.title('Prediction')
plt.legend()
plt.show()
# Announce the tuning phase (fixed typo "hperparameters" -> "hyperparameters"
# in the printed messages).
print(colored('\x1B[1m Tuning hyperparameters to get best parameters','blue'))
print(colored('\x1B[1m Tuning hyperparameters to get best optimizer among\n1. Adam\n2. SGD','blue'))
def Build_Reg(optimizer):
    """Build and compile the regression MLP (11 inputs -> 1 linear output).

    Parameters
    ----------
    optimizer : str or keras optimizer
        Passed straight through to ``model.compile``; the grid search below
        supplies 'adam' or 'SGD'.

    Returns
    -------
    Sequential
        Compiled model with MSE loss, ready for fitting.
    """
    model = Sequential()
    # Input layer: the 11 signal parameters from the dataset.
    model.add(Dense(units=24, activation='relu', input_dim=11,
                    kernel_initializer='uniform'))
    model.add(Dense(40, activation='relu'))
    model.add(Dense(60, activation='relu'))
    model.add(Dropout(0.02))
    model.add(Dense(45, activation='relu'))
    model.add(Dense(15, activation='relu'))
    model.add(Dropout(0.02))
    # Linear head for the continuous Signal_Strength target.
    model.add(Dense(1, activation='linear'))
    # Single-output model: pass the loss as a plain string, not a one-item list.
    model.compile(optimizer=optimizer, loss='mse')
    return model
Tuning hperparameters to get best parameters Tuning hperparameters to get best optimizer among 1. Adam 2. SGD
print(colored('\x1B[1m Initializing parameters for best optimizer','blue'))
# BUG FIX: this is a regression problem, so wrap the builder in
# KerasRegressor (already imported at the top of the file).
# KerasClassifier would round predictions to class labels and corrupt
# the neg_mean_squared_error scoring used by the grid search below.
classifier = KerasRegressor(build_fn=Build_Reg)
# Search space: batch size, epochs, and the optimizer handed to Build_Reg.
params1 = {'batch_size': [25, 32],
           'epochs': [100, 200],
           'optimizer': ['adam', 'SGD']}
Initializing parameters for best optimizer
print(colored('\x1B[1m Finding best optimizer with help of GRID_SEARCH','blue'))
# Exhaustive search over params1 with 10-fold CV, scored by negative MSE.
grid_search1 = GridSearchCV(
    estimator=classifier,
    param_grid=params1,
    cv=10,
    scoring='neg_mean_squared_error',
)
Finding best optimizer with help of GRID_SEARCH
grid_search1 = grid_search1.fit(X_train, Y_train, verbose=0)
best_params1 = grid_search1.best_params_
best_loss1 = grid_search1.best_score_
# Look the result up by key: `list(dict.values())[2]` silently reports the
# wrong value if the key order ever changes. Also pass the colour argument
# that `colored` was missing (matching the other banners).
print(colored('\x1B[1m Best Optimizer is', 'blue'), best_params1['optimizer'])
print('Best Parameters after tuning: {}'.format(best_params1))
print('Best loss after tuning: {}'.format(best_loss1))
Best Optimizer is SGD
Best Parameters after tuning: {'batch_size': 25, 'epochs': 100, 'optimizer': 'SGD'}
Best loss after tuning: -1.0396704043493035
# Pull the winning settings by key — order-independent and self-documenting,
# unlike the previous positional list(dict.values())[i] indexing.
bz = best_params1['batch_size']
ep = best_params1['epochs']
op = best_params1['optimizer']
print(colored('\x1B[1m Tuning hperparameters to get best Learning_Rate and Momentum','blue'))
def model_LR(learn_rate=0.01, momentum=0):
    """Build the regression MLP with a configurable learning rate/momentum.

    Parameters
    ----------
    learn_rate : float
        Learning rate for the optimizer family chosen earlier (global ``op``).
    momentum : float
        Momentum term; only used when SGD was the selected optimizer.

    Returns
    -------
    Sequential
        Compiled model with MSE loss.
    """
    model = Sequential()
    model.add(Dense(units=24, activation='relu', input_dim=11,
                    kernel_initializer='uniform'))
    model.add(Dense(40, activation='relu'))
    model.add(Dense(60, activation='relu'))
    model.add(Dropout(0.02))
    model.add(Dense(45, activation='relu'))
    model.add(Dense(15, activation='relu'))
    model.add(Dropout(0.02))
    model.add(Dense(1, activation='linear'))
    # `op` holds the optimizer name found by the earlier grid search.
    # (`op == str('adam')` was a redundant str() call around a literal.)
    if op == 'adam':
        optimizer = tf.keras.optimizers.Adam(learning_rate=learn_rate)
    else:
        optimizer = tf.keras.optimizers.SGD(learning_rate=learn_rate,
                                            momentum=momentum)
    model.compile(optimizer=optimizer, loss='mse')
    return model
Tuning hperparameters to get best Learning_Rate and Momentum
# BUG FIX: use KerasRegressor — KerasClassifier rounds predictions to class
# labels, which breaks the neg-MSE scoring of the randomized search below.
model = KerasRegressor(build_fn=model_LR, epochs=ep, batch_size=bz, verbose=0)
print(colored('\x1B[1m Initializing parameters for best Learning_Rate and Momentum','blue'))
# Candidate grids for the randomized search.
learn_rate = [0.001, 0.01, 0.1, 0.2, 0.3]
momentum = [0.0, 0.2, 0.4, 0.6, 0.8, 0.9]
params2 = dict(learn_rate=learn_rate, momentum=momentum)
Initializing parameters for best Learning_Rate and Momentum
print(colored('\x1B[1m Finding best Learning_Rate with help of RANDOM_SEARCH','blue'))
# Randomised search over the LR/momentum grid; 10-fold CV, neg-MSE score.
grid2 = RandomizedSearchCV(
    estimator=model,
    param_distributions=params2,
    cv=10,
    scoring='neg_mean_squared_error',
)
Finding best Learning_Rate with help of RANDOM_SEARCH
grid_search2 = grid2.fit(X_train, Y_train, verbose=0)
best_params2 = grid_search2.best_params_
best_loss2 = grid_search2.best_score_
# Read results by key — the previous positional list(dict.values())[i]
# indexing can swap the momentum/learning-rate labels if the dict order
# differs. Also pass the missing colour argument to `colored`.
print(colored('\x1B[1m Best Momentum is', 'blue'), best_params2['momentum'])
print(colored('\x1B[1m Best Learning_Rate is', 'blue'), best_params2['learn_rate'])
print('Best loss after tuning: {}'.format(best_loss2))
Best Momentum is 0.9 Best Learning_Rate is 0.001 Best loss after tuning: -1.0396704043493035
# Extract the tuned values by key, then rebuild the chosen optimizer with them.
m = best_params2['momentum']
lr = best_params2['learn_rate']
if op == 'adam':
    opt = tf.keras.optimizers.Adam(learning_rate=lr)
else:
    opt = tf.keras.optimizers.SGD(learning_rate=lr, momentum=m)
print(colored('\x1B[1m Tuning hperparameters to get best No. of Neurons','blue'))
def Model_N(neurons=1):
    """Build the regression MLP with the same width in every hidden layer.

    Parameters
    ----------
    neurons : int
        Number of units used by each hidden Dense layer.

    Returns
    -------
    Sequential
        Compiled model (MSE loss) using the previously tuned optimizer ``opt``.
    """
    model = Sequential()
    model.add(Dense(units=neurons, activation='relu', input_dim=11,
                    kernel_initializer='uniform'))
    model.add(Dense(neurons, activation='relu'))
    model.add(Dense(neurons, activation='relu'))
    model.add(Dropout(0.02))
    model.add(Dense(neurons, activation='relu'))
    model.add(Dense(neurons, activation='relu'))
    model.add(Dropout(0.02))
    model.add(Dense(1, activation='linear'))
    # `opt` is the optimizer instance built from the earlier LR/momentum search.
    model.compile(optimizer=opt, loss='mse')
    return model
Tuning hperparameters to get best No. of Neurons
# BUG FIX: KerasRegressor, not KerasClassifier, for a regression search
# scored with neg_mean_squared_error.
model = KerasRegressor(build_fn=Model_N, epochs=ep, batch_size=bz, verbose=0)
print(colored('\x1B[1m Initializing parameters for best No. of Neurons','blue'))
# Candidate hidden-layer widths.
neurons = [5, 10, 15, 20, 25, 30]
params3 = dict(neurons=neurons)
Initializing parameters for best No. of Neurons
print(colored('\x1B[1m Finding best No. of Neurons with help of RANDOM_SEARCH','blue'))
# Randomised search over candidate layer widths; 10-fold CV, neg-MSE score.
grid3 = RandomizedSearchCV(
    estimator=model,
    param_distributions=params3,
    cv=10,
    scoring='neg_mean_squared_error',
)
Finding best No. of Neurons with help of RANDOM_SEARCH
grid_search3 = grid3.fit(X_train, Y_train, verbose=0)
best_params3 = grid_search3.best_params_
best_loss3 = grid_search3.best_score_
# Key-based lookup instead of positional indexing; also supply the colour
# argument `colored` was missing.
print(colored('\x1B[1m Best No. of Neurons is', 'blue'), best_params3['neurons'])
print('Best loss after tuning: {}'.format(best_loss3))
Best No. of Neurons is 30
Best loss after tuning: -1.0387529731566427
# Keep the winning width (by key, not position) and announce dropout tuning
# (fixed typo "hperparameters").
neurons = best_params3['neurons']
print(colored('\x1B[1m Tuning hyperparameters to get best No. of Dropout_Rate','blue'))
def Model_D(dropout_rate=0.0):
    """Build the tuned-width regression MLP with a configurable dropout.

    A default of 0.0 is added so the builder is callable without arguments,
    matching the other builder functions (backward-compatible).

    Parameters
    ----------
    dropout_rate : float
        Dropout fraction applied after the second and third hidden layers.

    Returns
    -------
    Sequential
        Compiled model (MSE loss) using the tuned optimizer ``opt`` and the
        tuned global ``neurons`` width.
    """
    model = Sequential()
    model.add(Dense(neurons, activation='relu', input_dim=11,
                    kernel_initializer='uniform'))
    model.add(Dense(neurons, kernel_initializer='uniform', activation='relu'))
    model.add(Dropout(dropout_rate))
    model.add(Dense(neurons, kernel_initializer='uniform', activation='relu'))
    model.add(Dropout(dropout_rate))
    model.add(Dense(units=1, kernel_initializer='uniform', activation='linear'))
    model.compile(optimizer=opt, loss='mse')
    return model
Tuning hperparameters to get best No. of Dropout_Rate
# BUG FIX: regression wrapper (KerasRegressor) to match the neg-MSE scoring.
model = KerasRegressor(build_fn=Model_D, epochs=ep, batch_size=bz, verbose=0)
print(colored('\x1B[1m Initializing parameters for best No. of Dropout_Rate','blue'))
# Candidate dropout fractions.
dropout_rate = [0.0, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9]
params4 = dict(dropout_rate=dropout_rate)
Initializing parameters for best No. of Dropout_Rate
print(colored('\x1B[1m Finding best No. of Dropout_Rate with help of RANDOM_SEARCH','blue'))
# Randomised search over dropout fractions; 10-fold CV, neg-MSE score.
grid4 = RandomizedSearchCV(
    estimator=model,
    param_distributions=params4,
    cv=10,
    scoring='neg_mean_squared_error',
)
Finding best No. of Dropout_Rate with help of RANDOM_SEARCH
grid_search4 = grid4.fit(X_train, Y_train, verbose=0)
best_params4 = grid_search4.best_params_
best_loss4 = grid_search4.best_score_
# Key-based lookup is robust to dict ordering; also add the colour argument
# `colored` was missing.
print(colored('\x1B[1m Best No. of Dropout_Rate is', 'blue'), best_params4['dropout_rate'])
print('Best Parameters after tuning: {}'.format(best_params4))
print('Best loss after tuning: {}'.format(best_loss4))
Best No. of Dropout_Rate is 0.0
Best Parameters after tuning: {'dropout_rate': 0.0}
Best loss after tuning: -1.0396704043493035
# Rebuild a plain Sequential model with every tuned hyperparameter
# (width `neurons`, dropout, and optimizer `opt`). Dropout rate is read
# by key rather than by position in the params dict.
dropout_rate = best_params4['dropout_rate']
print(colored('\x1B[1m Rebuilding Model with all best Parameters','blue'))
model = Sequential()
model.add(Dense(neurons, activation='relu', input_dim=11,
                kernel_initializer='uniform'))
model.add(Dense(neurons, kernel_initializer='uniform', activation='relu'))
model.add(Dropout(dropout_rate))
model.add(Dense(neurons, kernel_initializer='uniform', activation='relu'))
model.add(Dropout(dropout_rate))
model.add(Dense(units=1, kernel_initializer='uniform', activation='linear'))
Rebuilding Model with all best Parameters
# Compile the rebuilt network with the tuned optimizer and MSE loss.
print(colored('\x1B[1m Compiling Model','blue'))
model.compile(loss=['mse'], optimizer=opt)
Compiling Model
# Train with the tuned epoch/batch settings; 30% of the data held out
# for validation.
print(colored('\x1B[1m Fitting the Model','blue'))
history = model.fit(X_train, Y_train, validation_split=0.3,
                    epochs=ep, batch_size=bz)
Fitting the Model
Epoch 1/100
31/31 [==============================] - 1s 10ms/step - loss: 27.9029 - val_loss: 12.1246
Epoch 2/100
31/31 [==============================] - 0s 4ms/step - loss: 8.9078 - val_loss: 2.0862
Epoch 3/100
31/31 [==============================] - 0s 5ms/step - loss: 1.4406 - val_loss: 0.6181
Epoch 4/100
31/31 [==============================] - 0s 4ms/step - loss: 0.6499 - val_loss: 0.6205
Epoch 5/100
31/31 [==============================] - 0s 4ms/step - loss: 0.6237 - val_loss: 0.6165
Epoch 6/100
31/31 [==============================] - 0s 4ms/step - loss: 0.6110 - val_loss: 0.6165
Epoch 7/100
31/31 [==============================] - 0s 3ms/step - loss: 0.6663 - val_loss: 0.6159
Epoch 8/100
31/31 [==============================] - 0s 4ms/step - loss: 0.6503 - val_loss: 0.6157
Epoch 9/100
31/31 [==============================] - 0s 5ms/step - loss: 0.6449 - val_loss: 0.6151
Epoch 10/100
31/31 [==============================] - 0s 3ms/step - loss: 0.6156 - val_loss: 0.6150
Epoch 11/100
31/31 [==============================] - 0s 3ms/step - loss: 0.5989 - val_loss: 0.6144
Epoch 12/100
31/31 [==============================] - 0s 3ms/step - loss: 0.6141 - val_loss: 0.6136
Epoch 13/100
31/31 [==============================] - 0s 4ms/step - loss: 0.6232 - val_loss: 0.6131
Epoch 14/100
31/31 [==============================] - 0s 3ms/step - loss: 0.5978 - val_loss: 0.6126
Epoch 15/100
31/31 [==============================] - 0s 6ms/step - loss: 0.6450 - val_loss: 0.6113
Epoch 16/100
31/31 [==============================] - 0s 6ms/step - loss: 0.5947 - val_loss: 0.6102
Epoch 17/100
31/31 [==============================] - 0s 6ms/step - loss: 0.6453 - val_loss: 0.6093
Epoch 18/100
31/31 [==============================] - 0s 5ms/step - loss: 0.6636 - val_loss: 0.6074
Epoch 19/100
31/31 [==============================] - 0s 6ms/step - loss: 0.6158 - val_loss: 0.6054
Epoch 20/100
31/31 [==============================] - 0s 4ms/step - loss: 0.5856 - val_loss: 0.6029
Epoch 21/100
31/31 [==============================] - 0s 3ms/step - loss: 0.6334 - val_loss: 0.6000
Epoch 22/100
31/31 [==============================] - 0s 3ms/step - loss: 0.6038 - val_loss: 0.5961
Epoch 23/100
31/31 [==============================] - 0s 3ms/step - loss: 0.5895 - val_loss: 0.5913
Epoch 24/100
31/31 [==============================] - 0s 4ms/step - loss: 0.5585 - val_loss: 0.5852
Epoch 25/100
31/31 [==============================] - 0s 3ms/step - loss: 0.5818 - val_loss: 0.5776
Epoch 26/100
31/31 [==============================] - 0s 3ms/step - loss: 0.5251 - val_loss: 0.5688
Epoch 27/100
31/31 [==============================] - 0s 4ms/step - loss: 0.5219 - val_loss: 0.5555
Epoch 28/100
31/31 [==============================] - 0s 3ms/step - loss: 0.5132 - val_loss: 0.5382
Epoch 29/100
31/31 [==============================] - 0s 4ms/step - loss: 0.5616 - val_loss: 0.5182
Epoch 30/100
31/31 [==============================] - 0s 3ms/step - loss: 0.5669 - val_loss: 0.4941
Epoch 31/100
31/31 [==============================] - 0s 3ms/step - loss: 0.5107 - val_loss: 0.4699
Epoch 32/100
31/31 [==============================] - 0s 3ms/step - loss: 0.4743 - val_loss: 0.4442
Epoch 33/100
31/31 [==============================] - 0s 3ms/step - loss: 0.4411 - val_loss: 0.4236
Epoch 34/100
31/31 [==============================] - 0s 3ms/step - loss: 0.4439 - val_loss: 0.4086
Epoch 35/100
31/31 [==============================] - 0s 3ms/step - loss: 0.4117 - val_loss: 0.3991
Epoch 36/100
31/31 [==============================] - 0s 3ms/step - loss: 0.3881 - val_loss: 0.3925
Epoch 37/100
31/31 [==============================] - 0s 3ms/step - loss: 0.4050 - val_loss: 0.3897
Epoch 38/100
31/31 [==============================] - 0s 3ms/step - loss: 0.4146 - val_loss: 0.3887
Epoch 39/100
31/31 [==============================] - 0s 3ms/step - loss: 0.4260 - val_loss: 0.3875
Epoch 40/100
31/31 [==============================] - 0s 4ms/step - loss: 0.3907 - val_loss: 0.3868
Epoch 41/100
31/31 [==============================] - 0s 3ms/step - loss: 0.4115 - val_loss: 0.3859
Epoch 42/100
31/31 [==============================] - 0s 3ms/step - loss: 0.4151 - val_loss: 0.3853
Epoch 43/100
31/31 [==============================] - 0s 3ms/step - loss: 0.4221 - val_loss: 0.3849
Epoch 44/100
31/31 [==============================] - 0s 4ms/step - loss: 0.4151 - val_loss: 0.3842
Epoch 45/100
31/31 [==============================] - 0s 4ms/step - loss: 0.3970 - val_loss: 0.3848
Epoch 46/100
31/31 [==============================] - 0s 3ms/step - loss: 0.4346 - val_loss: 0.3827
Epoch 47/100
31/31 [==============================] - 0s 3ms/step - loss: 0.4330 - val_loss: 0.3816
Epoch 48/100
31/31 [==============================] - 0s 3ms/step - loss: 0.4054 - val_loss: 0.3818
Epoch 49/100
31/31 [==============================] - 0s 3ms/step - loss: 0.4017 - val_loss: 0.3800
Epoch 50/100
31/31 [==============================] - 0s 3ms/step - loss: 0.4234 - val_loss: 0.3810
Epoch 51/100
31/31 [==============================] - 0s 3ms/step - loss: 0.4011 - val_loss: 0.3797
Epoch 52/100
31/31 [==============================] - 0s 4ms/step - loss: 0.3937 - val_loss: 0.3787
Epoch 53/100
31/31 [==============================] - 0s 3ms/step - loss: 0.3931 - val_loss: 0.3794
Epoch 54/100
31/31 [==============================] - 0s 5ms/step - loss: 0.3959 - val_loss: 0.3789
Epoch 55/100
31/31 [==============================] - 0s 6ms/step - loss: 0.4193 - val_loss: 0.3763
Epoch 56/100
31/31 [==============================] - 0s 5ms/step - loss: 0.4026 - val_loss: 0.3774
Epoch 57/100
31/31 [==============================] - 0s 5ms/step - loss: 0.3902 - val_loss: 0.3760
Epoch 58/100
31/31 [==============================] - 0s 5ms/step - loss: 0.3693 - val_loss: 0.3742
Epoch 59/100
31/31 [==============================] - 0s 6ms/step - loss: 0.3882 - val_loss: 0.3760
Epoch 60/100
31/31 [==============================] - 0s 6ms/step - loss: 0.3728 - val_loss: 0.3742
Epoch 61/100
31/31 [==============================] - 0s 7ms/step - loss: 0.3940 - val_loss: 0.3730
Epoch 62/100
31/31 [==============================] - 0s 5ms/step - loss: 0.3877 - val_loss: 0.3724
Epoch 63/100
31/31 [==============================] - 0s 6ms/step - loss: 0.3911 - val_loss: 0.3756
Epoch 64/100
31/31 [==============================] - 0s 4ms/step - loss: 0.4127 - val_loss: 0.3719
Epoch 65/100
31/31 [==============================] - 0s 6ms/step - loss: 0.4116 - val_loss: 0.3716
Epoch 66/100
31/31 [==============================] - 0s 6ms/step - loss: 0.4039 - val_loss: 0.3703
Epoch 67/100
31/31 [==============================] - 0s 4ms/step - loss: 0.3586 - val_loss: 0.3693
Epoch 68/100
31/31 [==============================] - 0s 5ms/step - loss: 0.4062 - val_loss: 0.3683
Epoch 69/100
31/31 [==============================] - 0s 6ms/step - loss: 0.3716 - val_loss: 0.3690
Epoch 70/100
31/31 [==============================] - 0s 5ms/step - loss: 0.3720 - val_loss: 0.3706
Epoch 71/100
31/31 [==============================] - 0s 6ms/step - loss: 0.3788 - val_loss: 0.3679
Epoch 72/100
31/31 [==============================] - 0s 3ms/step - loss: 0.4102 - val_loss: 0.3670
Epoch 73/100
31/31 [==============================] - 0s 7ms/step - loss: 0.3991 - val_loss: 0.3689
Epoch 74/100
31/31 [==============================] - 0s 4ms/step - loss: 0.3897 - val_loss: 0.3682
Epoch 75/100
31/31 [==============================] - 0s 5ms/step - loss: 0.4175 - val_loss: 0.3692
Epoch 76/100
31/31 [==============================] - 0s 5ms/step - loss: 0.3597 - val_loss: 0.3663
Epoch 77/100
31/31 [==============================] - 0s 7ms/step - loss: 0.3847 - val_loss: 0.3664
Epoch 78/100
31/31 [==============================] - 0s 7ms/step - loss: 0.3587 - val_loss: 0.3673
Epoch 79/100
31/31 [==============================] - 0s 9ms/step - loss: 0.3781 - val_loss: 0.3676
Epoch 80/100
31/31 [==============================] - 0s 7ms/step - loss: 0.3880 - val_loss: 0.3683
Epoch 81/100
31/31 [==============================] - 0s 6ms/step - loss: 0.3715 - val_loss: 0.3672
Epoch 82/100
31/31 [==============================] - 0s 6ms/step - loss: 0.3813 - val_loss: 0.3669
Epoch 83/100
31/31 [==============================] - 0s 6ms/step - loss: 0.3659 - val_loss: 0.3689
Epoch 84/100
31/31 [==============================] - 0s 4ms/step - loss: 0.3572 - val_loss: 0.3699
Epoch 85/100
31/31 [==============================] - 0s 5ms/step - loss: 0.3731 - val_loss: 0.3668
Epoch 86/100
31/31 [==============================] - 0s 5ms/step - loss: 0.3635 - val_loss: 0.3685
Epoch 87/100
31/31 [==============================] - 0s 4ms/step - loss: 0.4170 - val_loss: 0.3680
Epoch 88/100
31/31 [==============================] - 0s 5ms/step - loss: 0.3874 - val_loss: 0.3678
Epoch 89/100
31/31 [==============================] - 0s 4ms/step - loss: 0.3936 - val_loss: 0.3669
Epoch 90/100
31/31 [==============================] - 0s 7ms/step - loss: 0.3401 - val_loss: 0.3714
Epoch 91/100
31/31 [==============================] - 0s 5ms/step - loss: 0.3726 - val_loss: 0.3699
Epoch 92/100
31/31 [==============================] - 0s 5ms/step - loss: 0.3795 - val_loss: 0.3685
Epoch 93/100
31/31 [==============================] - 0s 6ms/step - loss: 0.3919 - val_loss: 0.3688
Epoch 94/100
31/31 [==============================] - 0s 6ms/step - loss: 0.3354 - val_loss: 0.3705
Epoch 95/100
31/31 [==============================] - 0s 6ms/step - loss: 0.4094 - val_loss: 0.3695
Epoch 96/100
31/31 [==============================] - 0s 6ms/step - loss: 0.3602 - val_loss: 0.3682
Epoch 97/100
31/31 [==============================] - 0s 5ms/step - loss: 0.3510 - val_loss: 0.3740
Epoch 98/100
31/31 [==============================] - 0s 5ms/step - loss: 0.3571 - val_loss: 0.3683
Epoch 99/100
31/31 [==============================] - 0s 4ms/step - loss: 0.3725 - val_loss: 0.3754
Epoch 100/100
31/31 [==============================] - 0s 3ms/step - loss: 0.4171 - val_loss: 0.3699
# Report the tuned model's loss on the training split.
print(colored('\x1B[1m Evaluating model with train dataset','blue'))
train_loss = model.evaluate(X_train, Y_train, verbose=0)
print(colored('\x1B[1mTrain Loss:','red'), train_loss)
Evaluating model with train dataset Train Loss: 0.3643158972263336
# Report the tuned model's loss on the held-out test split.
print(colored('\x1B[1m Evaluating model with test dataset','blue'))
test_loss = model.evaluate(X_test, Y_test, verbose=0)
print(colored('\x1B[1mTest Loss:','red'), test_loss)
Evaluating model with test dataset Test Loss: 0.37055355310440063
# Hand-picked settings for the regularised run below.
lr = 2e-2  # learning rate for the optimizer rebuilt below
Lambda = 1e-4  # L2 penalty strength applied to the output layer
d=1e-6  # optimizer decay
m=0.9  # SGD momentum (used only when `op` selected SGD)
# Regularised variant: same topology as Build_Reg, but the linear output
# layer carries an L2 weight penalty of strength `Lambda`.
print(colored('\x1B[1m Building the Model','blue'))
rg_model = Sequential([
    Dense(24, activation='relu', input_dim=11, kernel_initializer='uniform'),
    Dense(40, activation='relu'),
    Dense(60, activation='relu'),
    Dropout(0.02),
    Dense(45, activation='relu'),
    Dense(15, activation='relu'),
    Dropout(0.02),
    Dense(1, activation='linear', kernel_regularizer=regularizers.l2(Lambda)),
])
Building the Model
# Rebuild the optimizer family chosen by the grid search, now with decay.
# (`op == str('adam')` was a redundant str() call around a literal.)
if op == 'adam':
    opt = tf.keras.optimizers.Adam(learning_rate=lr, decay=d)
else:
    opt = tf.keras.optimizers.SGD(learning_rate=lr, momentum=m, decay=d)
print(colored('\x1B[1m Compiling Model','blue'))
# Note: this run compiles with MAE rather than the MSE used earlier.
# Single-output model, so pass the loss as a plain string.
rg_model.compile(optimizer=opt, loss='mae')
Compiling Model
print(colored('\x1B[1m Fitting the Model','blue'))
Fitting the Model
history = rg_model.fit(X_train, Y_train, epochs=ep,batch_size=bz, validation_split=0.3)
Epoch 1/100 31/31 [==============================] - 2s 17ms/step - loss: 3.9730 - val_loss: 0.6715 Epoch 2/100 31/31 [==============================] - 0s 4ms/step - loss: 0.6606 - val_loss: 0.6488 Epoch 3/100 31/31 [==============================] - 0s 5ms/step - loss: 0.6251 - val_loss: 0.5589 Epoch 4/100 31/31 [==============================] - 0s 5ms/step - loss: 0.5487 - val_loss: 0.5704 Epoch 5/100 31/31 [==============================] - 0s 6ms/step - loss: 0.6207 - val_loss: 0.5901 Epoch 6/100 31/31 [==============================] - 0s 5ms/step - loss: 0.5589 - val_loss: 0.4659 Epoch 7/100 31/31 [==============================] - 0s 6ms/step - loss: 0.5232 - val_loss: 0.5360 Epoch 8/100 31/31 [==============================] - 0s 5ms/step - loss: 0.5194 - val_loss: 0.4594 Epoch 9/100 31/31 [==============================] - 0s 6ms/step - loss: 0.4836 - val_loss: 0.5631 Epoch 10/100 31/31 [==============================] - 0s 7ms/step - loss: 0.5175 - val_loss: 0.4679 Epoch 11/100 31/31 [==============================] - 0s 6ms/step - loss: 0.4888 - val_loss: 0.4743 Epoch 12/100 31/31 [==============================] - 0s 7ms/step - loss: 0.5169 - val_loss: 0.4598 Epoch 13/100 31/31 [==============================] - 0s 6ms/step - loss: 0.4593 - val_loss: 0.4445 Epoch 14/100 31/31 [==============================] - 0s 5ms/step - loss: 0.4858 - val_loss: 0.4655 Epoch 15/100 31/31 [==============================] - 0s 5ms/step - loss: 0.4700 - val_loss: 0.4553 Epoch 16/100 31/31 [==============================] - 0s 8ms/step - loss: 0.4327 - val_loss: 0.4697 Epoch 17/100 31/31 [==============================] - 0s 6ms/step - loss: 0.4527 - val_loss: 0.4355 Epoch 18/100 31/31 [==============================] - 0s 5ms/step - loss: 0.4571 - val_loss: 0.4433 Epoch 19/100 31/31 [==============================] - 0s 7ms/step - loss: 0.4564 - val_loss: 0.5255 Epoch 20/100 31/31 [==============================] - 0s 5ms/step - loss: 0.4889 - val_loss: 0.4599 Epoch 
21/100 31/31 [==============================] - 0s 5ms/step - loss: 0.4359 - val_loss: 0.5070 Epoch 22/100 31/31 [==============================] - 0s 4ms/step - loss: 0.4087 - val_loss: 0.5383 Epoch 23/100 31/31 [==============================] - 0s 3ms/step - loss: 0.4565 - val_loss: 0.4871 Epoch 24/100 31/31 [==============================] - 0s 3ms/step - loss: 0.4263 - val_loss: 0.4511 Epoch 25/100 31/31 [==============================] - 0s 3ms/step - loss: 0.4262 - val_loss: 0.4459 Epoch 26/100 31/31 [==============================] - 0s 7ms/step - loss: 0.4405 - val_loss: 0.4623 Epoch 27/100 31/31 [==============================] - 0s 4ms/step - loss: 0.3978 - val_loss: 0.4607 Epoch 28/100 31/31 [==============================] - 0s 3ms/step - loss: 0.4069 - val_loss: 0.4659 Epoch 29/100 31/31 [==============================] - 0s 8ms/step - loss: 0.4139 - val_loss: 0.4775 Epoch 30/100 31/31 [==============================] - 0s 6ms/step - loss: 0.4573 - val_loss: 0.4661 Epoch 31/100 31/31 [==============================] - 0s 6ms/step - loss: 0.3875 - val_loss: 0.4542 Epoch 32/100 31/31 [==============================] - 0s 5ms/step - loss: 0.4050 - val_loss: 0.4868 Epoch 33/100 31/31 [==============================] - 0s 6ms/step - loss: 0.3839 - val_loss: 0.4659 Epoch 34/100 31/31 [==============================] - 0s 8ms/step - loss: 0.3577 - val_loss: 0.4765 Epoch 35/100 31/31 [==============================] - 0s 5ms/step - loss: 0.3640 - val_loss: 0.4502 Epoch 36/100 31/31 [==============================] - 0s 5ms/step - loss: 0.3949 - val_loss: 0.4953 Epoch 37/100 31/31 [==============================] - 0s 9ms/step - loss: 0.4164 - val_loss: 0.4631 Epoch 38/100 31/31 [==============================] - 0s 3ms/step - loss: 0.4076 - val_loss: 0.4788 Epoch 39/100 31/31 [==============================] - 0s 3ms/step - loss: 0.3960 - val_loss: 0.4506 Epoch 40/100 31/31 [==============================] - 0s 3ms/step - loss: 0.4203 - val_loss: 0.4494 Epoch 
41/100 31/31 [==============================] - 0s 3ms/step - loss: 0.3795 - val_loss: 0.4739 Epoch 42/100 31/31 [==============================] - 0s 4ms/step - loss: 0.3863 - val_loss: 0.4532 Epoch 43/100 31/31 [==============================] - 0s 4ms/step - loss: 0.3780 - val_loss: 0.4793 Epoch 44/100 31/31 [==============================] - 0s 3ms/step - loss: 0.3993 - val_loss: 0.4597 Epoch 45/100 31/31 [==============================] - 0s 5ms/step - loss: 0.3482 - val_loss: 0.4460 Epoch 46/100 31/31 [==============================] - 0s 5ms/step - loss: 0.3381 - val_loss: 0.4859 Epoch 47/100 31/31 [==============================] - 0s 5ms/step - loss: 0.3835 - val_loss: 0.4385 Epoch 48/100 31/31 [==============================] - 0s 5ms/step - loss: 0.3356 - val_loss: 0.4314 Epoch 49/100 31/31 [==============================] - 0s 6ms/step - loss: 0.3394 - val_loss: 0.4630 Epoch 50/100 31/31 [==============================] - 0s 6ms/step - loss: 0.4264 - val_loss: 0.4623 Epoch 51/100 31/31 [==============================] - 0s 4ms/step - loss: 0.3660 - val_loss: 0.4604 Epoch 52/100 31/31 [==============================] - 0s 6ms/step - loss: 0.3517 - val_loss: 0.4452 Epoch 53/100 31/31 [==============================] - 0s 6ms/step - loss: 0.3340 - val_loss: 0.4659 Epoch 54/100 31/31 [==============================] - 0s 4ms/step - loss: 0.3338 - val_loss: 0.5631 Epoch 55/100 31/31 [==============================] - 0s 5ms/step - loss: 0.3648 - val_loss: 0.4527 Epoch 56/100 31/31 [==============================] - 0s 7ms/step - loss: 0.3389 - val_loss: 0.4491 Epoch 57/100 31/31 [==============================] - 0s 3ms/step - loss: 0.3465 - val_loss: 0.4613 Epoch 58/100 31/31 [==============================] - 0s 3ms/step - loss: 0.3498 - val_loss: 0.4459 Epoch 59/100 31/31 [==============================] - 0s 3ms/step - loss: 0.3055 - val_loss: 0.4599 Epoch 60/100 31/31 [==============================] - 0s 4ms/step - loss: 0.3655 - val_loss: 0.4479 Epoch 
61/100 31/31 [==============================] - 0s 3ms/step - loss: 0.3373 - val_loss: 0.4393 Epoch 62/100 31/31 [==============================] - 0s 3ms/step - loss: 0.3368 - val_loss: 0.4470 Epoch 63/100 31/31 [==============================] - 0s 3ms/step - loss: 0.3237 - val_loss: 0.4618 Epoch 64/100 31/31 [==============================] - 0s 3ms/step - loss: 0.2993 - val_loss: 0.4507 Epoch 65/100 31/31 [==============================] - 0s 4ms/step - loss: 0.3336 - val_loss: 0.4820 Epoch 66/100 31/31 [==============================] - 0s 4ms/step - loss: 0.3122 - val_loss: 0.4685 Epoch 67/100 31/31 [==============================] - 0s 4ms/step - loss: 0.3194 - val_loss: 0.4632 Epoch 68/100 31/31 [==============================] - 0s 3ms/step - loss: 0.3247 - val_loss: 0.4298 Epoch 69/100 31/31 [==============================] - 0s 3ms/step - loss: 0.3353 - val_loss: 0.4649 Epoch 70/100 31/31 [==============================] - 0s 3ms/step - loss: 0.3077 - val_loss: 0.4515 Epoch 71/100 31/31 [==============================] - 0s 4ms/step - loss: 0.3207 - val_loss: 0.4539 Epoch 72/100 31/31 [==============================] - 0s 4ms/step - loss: 0.3089 - val_loss: 0.4477 Epoch 73/100 31/31 [==============================] - 0s 4ms/step - loss: 0.3084 - val_loss: 0.4721 Epoch 74/100 31/31 [==============================] - 0s 4ms/step - loss: 0.2971 - val_loss: 0.4572 Epoch 75/100 31/31 [==============================] - 0s 3ms/step - loss: 0.3250 - val_loss: 0.4750 Epoch 76/100 31/31 [==============================] - 0s 3ms/step - loss: 0.3071 - val_loss: 0.4459 Epoch 77/100 31/31 [==============================] - 0s 3ms/step - loss: 0.3125 - val_loss: 0.4455 Epoch 78/100 31/31 [==============================] - 0s 3ms/step - loss: 0.2880 - val_loss: 0.4492 Epoch 79/100 31/31 [==============================] - 0s 4ms/step - loss: 0.2878 - val_loss: 0.4489 Epoch 80/100 31/31 [==============================] - 0s 3ms/step - loss: 0.2862 - val_loss: 0.4684 Epoch 
81/100 31/31 [==============================] - 0s 3ms/step - loss: 0.3320 - val_loss: 0.4613 Epoch 82/100 31/31 [==============================] - 0s 3ms/step - loss: 0.2999 - val_loss: 0.4783 Epoch 83/100 31/31 [==============================] - 0s 3ms/step - loss: 0.3015 - val_loss: 0.4698 Epoch 84/100 31/31 [==============================] - 0s 3ms/step - loss: 0.3457 - val_loss: 0.4663 Epoch 85/100 31/31 [==============================] - 0s 3ms/step - loss: 0.3061 - val_loss: 0.4441 Epoch 86/100 31/31 [==============================] - 0s 3ms/step - loss: 0.2649 - val_loss: 0.4568 Epoch 87/100 31/31 [==============================] - 0s 3ms/step - loss: 0.2834 - val_loss: 0.4817 Epoch 88/100 31/31 [==============================] - 0s 4ms/step - loss: 0.3064 - val_loss: 0.4598 Epoch 89/100 31/31 [==============================] - 0s 4ms/step - loss: 0.2550 - val_loss: 0.4565 Epoch 90/100 31/31 [==============================] - 0s 4ms/step - loss: 0.3120 - val_loss: 0.4499 Epoch 91/100 31/31 [==============================] - 0s 4ms/step - loss: 0.2857 - val_loss: 0.4409 Epoch 92/100 31/31 [==============================] - 0s 3ms/step - loss: 0.2970 - val_loss: 0.4493 Epoch 93/100 31/31 [==============================] - 0s 3ms/step - loss: 0.2683 - val_loss: 0.4999 Epoch 94/100 31/31 [==============================] - 0s 4ms/step - loss: 0.3002 - val_loss: 0.4648 Epoch 95/100 31/31 [==============================] - 0s 4ms/step - loss: 0.3366 - val_loss: 0.4744 Epoch 96/100 31/31 [==============================] - 0s 4ms/step - loss: 0.3652 - val_loss: 0.4549 Epoch 97/100 31/31 [==============================] - 0s 5ms/step - loss: 0.2909 - val_loss: 0.4571 Epoch 98/100 31/31 [==============================] - 0s 6ms/step - loss: 0.2685 - val_loss: 0.4744 Epoch 99/100 31/31 [==============================] - 0s 5ms/step - loss: 0.2791 - val_loss: 0.4750 Epoch 100/100 31/31 [==============================] - 0s 5ms/step - loss: 0.2724 - val_loss: 0.4781
print(colored('\x1B[1m Evaluating model with train dataset','blue'))
# Evaluate the already-trained model on the training split.
# NOTE(review): the original re-ran rg_model.fit(X_train, ...) for another
# `ep` epochs right before evaluating, so the reported "Train Loss" belonged
# to a model trained twice as long as the one whose history is logged above.
# Evaluation must not mutate the model being assessed.
loss1 = rg_model.evaluate(X_train, Y_train, verbose=0)
Evaluating model with train dataset
print(colored('\x1B[1mTrain Loss:','red'),(loss1))
Train Loss: 0.24150460958480835
print(colored('\x1B[1m Evaluating model with test dataset','blue'))
Evaluating model with test dataset
# Evaluate the trained regressor on the held-out test set.
# NOTE(review): the original called rg_model.fit(X_test, Y_test, ...) here,
# i.e. it trained on the test set and then measured loss on that same data.
# That leaks the test set into training and invalidates the reported test
# loss; the test set must only ever be passed to evaluate/predict.
loss = rg_model.evaluate(X_test, Y_test, verbose=0)
Epoch 1/100 11/11 [==============================] - 0s 3ms/step - loss: 0.4836 Epoch 2/100 11/11 [==============================] - 0s 3ms/step - loss: 0.4258 Epoch 3/100 11/11 [==============================] - 0s 3ms/step - loss: 0.4220 Epoch 4/100 11/11 [==============================] - 0s 3ms/step - loss: 0.3923 Epoch 5/100 11/11 [==============================] - 0s 2ms/step - loss: 0.3705 Epoch 6/100 11/11 [==============================] - 0s 2ms/step - loss: 0.3674 Epoch 7/100 11/11 [==============================] - 0s 3ms/step - loss: 0.3568 Epoch 8/100 11/11 [==============================] - 0s 8ms/step - loss: 0.3481 Epoch 9/100 11/11 [==============================] - 0s 2ms/step - loss: 0.3297 Epoch 10/100 11/11 [==============================] - 0s 2ms/step - loss: 0.3229 Epoch 11/100 11/11 [==============================] - 0s 2ms/step - loss: 0.3063 Epoch 12/100 11/11 [==============================] - 0s 3ms/step - loss: 0.3056 Epoch 13/100 11/11 [==============================] - 0s 2ms/step - loss: 0.3116 Epoch 14/100 11/11 [==============================] - 0s 4ms/step - loss: 0.2980 Epoch 15/100 11/11 [==============================] - 0s 2ms/step - loss: 0.2874 Epoch 16/100 11/11 [==============================] - 0s 2ms/step - loss: 0.2900 Epoch 17/100 11/11 [==============================] - 0s 2ms/step - loss: 0.2821 Epoch 18/100 11/11 [==============================] - 0s 2ms/step - loss: 0.2752 Epoch 19/100 11/11 [==============================] - 0s 2ms/step - loss: 0.2853 Epoch 20/100 11/11 [==============================] - 0s 1ms/step - loss: 0.2833 Epoch 21/100 11/11 [==============================] - 0s 2ms/step - loss: 0.2833 Epoch 22/100 11/11 [==============================] - 0s 2ms/step - loss: 0.3062 Epoch 23/100 11/11 [==============================] - 0s 2ms/step - loss: 0.3013 Epoch 24/100 11/11 [==============================] - 0s 2ms/step - loss: 0.2936 Epoch 25/100 11/11 [==============================] - 0s 
2ms/step - loss: 0.2817 Epoch 26/100 11/11 [==============================] - 0s 2ms/step - loss: 0.2785 Epoch 27/100 11/11 [==============================] - 0s 2ms/step - loss: 0.2827 Epoch 28/100 11/11 [==============================] - 0s 2ms/step - loss: 0.2757 Epoch 29/100 11/11 [==============================] - 0s 2ms/step - loss: 0.2821 Epoch 30/100 11/11 [==============================] - 0s 4ms/step - loss: 0.2943 Epoch 31/100 11/11 [==============================] - 0s 3ms/step - loss: 0.2908 Epoch 32/100 11/11 [==============================] - 0s 3ms/step - loss: 0.2769 Epoch 33/100 11/11 [==============================] - 0s 3ms/step - loss: 0.2742 Epoch 34/100 11/11 [==============================] - 0s 3ms/step - loss: 0.3027 Epoch 35/100 11/11 [==============================] - 0s 4ms/step - loss: 0.2832 Epoch 36/100 11/11 [==============================] - 0s 3ms/step - loss: 0.2770 Epoch 37/100 11/11 [==============================] - 0s 3ms/step - loss: 0.2782 Epoch 38/100 11/11 [==============================] - 0s 3ms/step - loss: 0.2787 Epoch 39/100 11/11 [==============================] - 0s 3ms/step - loss: 0.2666 Epoch 40/100 11/11 [==============================] - 0s 4ms/step - loss: 0.2792 Epoch 41/100 11/11 [==============================] - 0s 3ms/step - loss: 0.2917 Epoch 42/100 11/11 [==============================] - 0s 4ms/step - loss: 0.2667 Epoch 43/100 11/11 [==============================] - 0s 3ms/step - loss: 0.2730 Epoch 44/100 11/11 [==============================] - 0s 3ms/step - loss: 0.2704 Epoch 45/100 11/11 [==============================] - 0s 2ms/step - loss: 0.2791 Epoch 46/100 11/11 [==============================] - 0s 2ms/step - loss: 0.2683 Epoch 47/100 11/11 [==============================] - 0s 2ms/step - loss: 0.2746 Epoch 48/100 11/11 [==============================] - 0s 2ms/step - loss: 0.2704 Epoch 49/100 11/11 [==============================] - 0s 2ms/step - loss: 0.2786 Epoch 50/100 11/11 
[==============================] - 0s 2ms/step - loss: 0.2620 Epoch 51/100 11/11 [==============================] - 0s 2ms/step - loss: 0.2783 Epoch 52/100 11/11 [==============================] - 0s 2ms/step - loss: 0.2806 Epoch 53/100 11/11 [==============================] - 0s 2ms/step - loss: 0.2709 Epoch 54/100 11/11 [==============================] - 0s 2ms/step - loss: 0.2797 Epoch 55/100 11/11 [==============================] - 0s 2ms/step - loss: 0.2701 Epoch 56/100 11/11 [==============================] - 0s 2ms/step - loss: 0.2651 Epoch 57/100 11/11 [==============================] - 0s 2ms/step - loss: 0.2747 Epoch 58/100 11/11 [==============================] - 0s 2ms/step - loss: 0.2853 Epoch 59/100 11/11 [==============================] - 0s 2ms/step - loss: 0.2900 Epoch 60/100 11/11 [==============================] - 0s 2ms/step - loss: 0.2808 Epoch 61/100 11/11 [==============================] - 0s 1ms/step - loss: 0.2761 Epoch 62/100 11/11 [==============================] - 0s 1ms/step - loss: 0.2787 Epoch 63/100 11/11 [==============================] - 0s 1ms/step - loss: 0.3085 Epoch 64/100 11/11 [==============================] - 0s 1ms/step - loss: 0.2774 Epoch 65/100 11/11 [==============================] - 0s 1ms/step - loss: 0.2670 Epoch 66/100 11/11 [==============================] - 0s 1ms/step - loss: 0.2714 Epoch 67/100 11/11 [==============================] - 0s 2ms/step - loss: 0.2688 Epoch 68/100 11/11 [==============================] - 0s 2ms/step - loss: 0.2738 Epoch 69/100 11/11 [==============================] - 0s 1ms/step - loss: 0.2620 Epoch 70/100 11/11 [==============================] - 0s 1ms/step - loss: 0.2584 Epoch 71/100 11/11 [==============================] - 0s 3ms/step - loss: 0.2568 Epoch 72/100 11/11 [==============================] - 0s 2ms/step - loss: 0.2630 Epoch 73/100 11/11 [==============================] - 0s 2ms/step - loss: 0.2664 Epoch 74/100 11/11 [==============================] - 0s 2ms/step - loss: 
0.2651 Epoch 75/100 11/11 [==============================] - 0s 2ms/step - loss: 0.2570 Epoch 76/100 11/11 [==============================] - 0s 2ms/step - loss: 0.2598 Epoch 77/100 11/11 [==============================] - 0s 2ms/step - loss: 0.2717 Epoch 78/100 11/11 [==============================] - 0s 2ms/step - loss: 0.2699 Epoch 79/100 11/11 [==============================] - 0s 4ms/step - loss: 0.2793 Epoch 80/100 11/11 [==============================] - 0s 1ms/step - loss: 0.2742 Epoch 81/100 11/11 [==============================] - 0s 2ms/step - loss: 0.2590 Epoch 82/100 11/11 [==============================] - 0s 2ms/step - loss: 0.2852 Epoch 83/100 11/11 [==============================] - 0s 1ms/step - loss: 0.2618 Epoch 84/100 11/11 [==============================] - 0s 1ms/step - loss: 0.2662 Epoch 85/100 11/11 [==============================] - 0s 1ms/step - loss: 0.2597 Epoch 86/100 11/11 [==============================] - 0s 1ms/step - loss: 0.2636 Epoch 87/100 11/11 [==============================] - 0s 1ms/step - loss: 0.2588 Epoch 88/100 11/11 [==============================] - 0s 1ms/step - loss: 0.2627 Epoch 89/100 11/11 [==============================] - 0s 1ms/step - loss: 0.2610 Epoch 90/100 11/11 [==============================] - 0s 1ms/step - loss: 0.2573 Epoch 91/100 11/11 [==============================] - 0s 1ms/step - loss: 0.2562 Epoch 92/100 11/11 [==============================] - 0s 1ms/step - loss: 0.2438 Epoch 93/100 11/11 [==============================] - 0s 2ms/step - loss: 0.2557 Epoch 94/100 11/11 [==============================] - 0s 1ms/step - loss: 0.2589 Epoch 95/100 11/11 [==============================] - 0s 1ms/step - loss: 0.2478 Epoch 96/100 11/11 [==============================] - 0s 2ms/step - loss: 0.2434 Epoch 97/100 11/11 [==============================] - 0s 2ms/step - loss: 0.2536 Epoch 98/100 11/11 [==============================] - 0s 2ms/step - loss: 0.2523 Epoch 99/100 11/11 
[==============================] - 0s 2ms/step - loss: 0.2412 Epoch 100/100 11/11 [==============================] - 0s 2ms/step - loss: 0.2547
print(colored('\x1B[1mTest Loss:','red'),(loss))
Test Loss: 0.2353844940662384
# Persist the trained regressor (TensorFlow SavedModel directory).
save_banner = colored('\x1B[1mSaving Model:', 'blue')
print(save_banner)
rg_model.save('NN Regressor')
Saving Model:
INFO:tensorflow:Assets written to: NN Regressor/assets
# Work on a deep copy so the original frame stays untouched.
dfc = df.copy(deep=True)
# Target is Signal_Strength; features are every remaining column.
y = dfc['Signal_Strength']
x = dfc.drop(['Signal_Strength'], axis=1)
# 80/20 hold-out split with a fixed seed for reproducibility.
X_train, X_test, Y_train, Y_test = train_test_split(
    x, y, test_size=0.2, random_state=202
)
print(colored('\x1B[1m Scaling the data','blue'))
# Standardize features using statistics from the TRAINING data only.
# NOTE(review): the original fit a second StandardScaler on X_test, which
# leaks test-set statistics and puts train and test in inconsistent feature
# spaces. The correct protocol is fit-on-train, transform-both.
scaler = StandardScaler().fit(X_train)
X_train = scaler.transform(X_train)
X_test = scaler.transform(X_test)
Scaling the data
print(colored('\x1B[1m Converting target column to vector by one-hot encoding','blue'))
from keras.utils import np_utils
# Fix the class count explicitly: calling to_categorical separately on each
# split infers the matrix width from that split's max label, so a class
# absent from one split would yield mismatched train/test shapes (the model
# head built below expects a fixed number of outputs).
num_classes = int(max(Y_train.max(), Y_test.max())) + 1
Y_train = np_utils.to_categorical(Y_train, num_classes)
Y_test = np_utils.to_categorical(Y_test, num_classes)
Converting target column to vector by one-hot encoding
print(colored('\x1B[1m Building classifier model','blue'))
# Feed-forward classifier: 11 input features -> four identical hidden
# stages (Dense(50) -> BatchNorm -> Dropout(0.02) -> ReLU) -> 9-way softmax.
model = Sequential()
model.add(keras.layers.Flatten(input_shape=(11,)))
for _ in range(4):
    model.add(Dense(50))
    model.add(BatchNormalization())
    model.add(Dropout(.02))
    model.add(Activation('relu'))
# Output layer: one unit per class, softmax for class probabilities.
model.add(Dense(9))
model.add(Activation('softmax'))
Building classifier model
# One-hot targets + softmax head -> categorical cross-entropy, optimized
# with plain SGD, tracking accuracy during training.
compile_banner = colored('\x1B[1m Compiling Model', 'blue')
print(compile_banner)
model.compile(
    loss='categorical_crossentropy',
    optimizer='SGD',
    metrics=['accuracy'],
)
Compiling Model
print(colored('\x1B[1m Fitting the Model','blue'))
Fitting the Model
# Train for 200 epochs, scoring the hold-out split after every epoch.
model.fit(
    X_train,
    Y_train,
    epochs=200,
    batch_size=32,
    validation_data=(X_test, Y_test),
)
Epoch 1/200 34/34 [==============================] - 2s 12ms/step - loss: 2.1347 - accuracy: 0.1731 - val_loss: 1.9835 - val_accuracy: 0.4559 Epoch 2/200 34/34 [==============================] - 0s 4ms/step - loss: 1.6158 - accuracy: 0.4045 - val_loss: 1.7971 - val_accuracy: 0.4853 Epoch 3/200 34/34 [==============================] - 0s 4ms/step - loss: 1.3589 - accuracy: 0.4904 - val_loss: 1.6092 - val_accuracy: 0.5331 Epoch 4/200 34/34 [==============================] - 0s 4ms/step - loss: 1.2023 - accuracy: 0.5715 - val_loss: 1.4506 - val_accuracy: 0.5515 Epoch 5/200 34/34 [==============================] - 0s 5ms/step - loss: 1.1220 - accuracy: 0.5802 - val_loss: 1.3254 - val_accuracy: 0.5551 Epoch 6/200 34/34 [==============================] - 0s 4ms/step - loss: 1.0758 - accuracy: 0.5786 - val_loss: 1.2354 - val_accuracy: 0.5809 Epoch 7/200 34/34 [==============================] - 0s 4ms/step - loss: 0.9988 - accuracy: 0.6116 - val_loss: 1.1711 - val_accuracy: 0.5772 Epoch 8/200 34/34 [==============================] - 0s 4ms/step - loss: 1.0023 - accuracy: 0.6007 - val_loss: 1.1259 - val_accuracy: 0.6029 Epoch 9/200 34/34 [==============================] - 0s 4ms/step - loss: 1.0471 - accuracy: 0.6001 - val_loss: 1.0949 - val_accuracy: 0.6029 Epoch 10/200 34/34 [==============================] - 0s 3ms/step - loss: 0.9960 - accuracy: 0.6128 - val_loss: 1.0699 - val_accuracy: 0.5993 Epoch 11/200 34/34 [==============================] - 0s 4ms/step - loss: 0.9838 - accuracy: 0.5958 - val_loss: 1.0514 - val_accuracy: 0.6066 Epoch 12/200 34/34 [==============================] - 0s 4ms/step - loss: 0.9496 - accuracy: 0.5941 - val_loss: 1.0446 - val_accuracy: 0.6066 Epoch 13/200 34/34 [==============================] - 0s 4ms/step - loss: 0.9630 - accuracy: 0.6051 - val_loss: 1.0374 - val_accuracy: 0.5919 Epoch 14/200 34/34 [==============================] - 0s 4ms/step - loss: 0.9628 - accuracy: 0.6146 - val_loss: 1.0340 - val_accuracy: 0.6066 Epoch 15/200 34/34 
[==============================] - 0s 4ms/step - loss: 0.9368 - accuracy: 0.5859 - val_loss: 1.0218 - val_accuracy: 0.6066 Epoch 16/200 34/34 [==============================] - 0s 5ms/step - loss: 0.9291 - accuracy: 0.6067 - val_loss: 1.0191 - val_accuracy: 0.5919 Epoch 17/200 34/34 [==============================] - 0s 4ms/step - loss: 0.9197 - accuracy: 0.5954 - val_loss: 1.0156 - val_accuracy: 0.6029 Epoch 18/200 34/34 [==============================] - 0s 4ms/step - loss: 0.8439 - accuracy: 0.6281 - val_loss: 1.0106 - val_accuracy: 0.6029 Epoch 19/200 34/34 [==============================] - 0s 4ms/step - loss: 0.9147 - accuracy: 0.6216 - val_loss: 1.0094 - val_accuracy: 0.6213 Epoch 20/200 34/34 [==============================] - 0s 4ms/step - loss: 0.8635 - accuracy: 0.6405 - val_loss: 1.0045 - val_accuracy: 0.6176 Epoch 21/200 34/34 [==============================] - 0s 6ms/step - loss: 0.8606 - accuracy: 0.6374 - val_loss: 1.0054 - val_accuracy: 0.6103 Epoch 22/200 34/34 [==============================] - 0s 4ms/step - loss: 0.8763 - accuracy: 0.6081 - val_loss: 1.0072 - val_accuracy: 0.5956 Epoch 23/200 34/34 [==============================] - 0s 4ms/step - loss: 0.8707 - accuracy: 0.6424 - val_loss: 1.0079 - val_accuracy: 0.6066 Epoch 24/200 34/34 [==============================] - 0s 4ms/step - loss: 0.9073 - accuracy: 0.6188 - val_loss: 1.0039 - val_accuracy: 0.6140 Epoch 25/200 34/34 [==============================] - 0s 4ms/step - loss: 0.8790 - accuracy: 0.6223 - val_loss: 1.0056 - val_accuracy: 0.5993 Epoch 26/200 34/34 [==============================] - 0s 4ms/step - loss: 0.8626 - accuracy: 0.6130 - val_loss: 1.0080 - val_accuracy: 0.6029 Epoch 27/200 34/34 [==============================] - 0s 4ms/step - loss: 0.8495 - accuracy: 0.6351 - val_loss: 1.0093 - val_accuracy: 0.6103 Epoch 28/200 34/34 [==============================] - 0s 4ms/step - loss: 0.8682 - accuracy: 0.6403 - val_loss: 1.0064 - val_accuracy: 0.5993 Epoch 29/200 34/34 
[==============================] - 0s 4ms/step - loss: 0.8673 - accuracy: 0.6403 - val_loss: 1.0094 - val_accuracy: 0.5956 Epoch 30/200 34/34 [==============================] - 0s 4ms/step - loss: 0.8567 - accuracy: 0.6373 - val_loss: 1.0086 - val_accuracy: 0.6029 Epoch 31/200 34/34 [==============================] - 0s 4ms/step - loss: 0.8354 - accuracy: 0.6620 - val_loss: 1.0131 - val_accuracy: 0.5956 Epoch 32/200 34/34 [==============================] - 0s 4ms/step - loss: 0.8044 - accuracy: 0.6753 - val_loss: 1.0175 - val_accuracy: 0.6066 Epoch 33/200 34/34 [==============================] - 0s 5ms/step - loss: 0.8183 - accuracy: 0.6637 - val_loss: 1.0181 - val_accuracy: 0.6140 Epoch 34/200 34/34 [==============================] - 0s 4ms/step - loss: 0.8119 - accuracy: 0.6650 - val_loss: 1.0104 - val_accuracy: 0.6140 Epoch 35/200 34/34 [==============================] - 0s 3ms/step - loss: 0.8211 - accuracy: 0.6607 - val_loss: 1.0123 - val_accuracy: 0.6029 Epoch 36/200 34/34 [==============================] - 0s 4ms/step - loss: 0.7861 - accuracy: 0.6596 - val_loss: 1.0147 - val_accuracy: 0.6029 Epoch 37/200 34/34 [==============================] - 0s 5ms/step - loss: 0.8172 - accuracy: 0.6568 - val_loss: 1.0142 - val_accuracy: 0.6140 Epoch 38/200 34/34 [==============================] - 0s 4ms/step - loss: 0.8071 - accuracy: 0.6352 - val_loss: 1.0195 - val_accuracy: 0.5993 Epoch 39/200 34/34 [==============================] - 0s 4ms/step - loss: 0.7966 - accuracy: 0.6594 - val_loss: 1.0157 - val_accuracy: 0.6029 Epoch 40/200 34/34 [==============================] - 0s 4ms/step - loss: 0.8036 - accuracy: 0.6654 - val_loss: 1.0177 - val_accuracy: 0.6029 Epoch 41/200 34/34 [==============================] - 0s 4ms/step - loss: 0.7645 - accuracy: 0.6702 - val_loss: 1.0247 - val_accuracy: 0.6066 Epoch 42/200 34/34 [==============================] - 0s 4ms/step - loss: 0.7806 - accuracy: 0.6574 - val_loss: 1.0208 - val_accuracy: 0.5993 Epoch 43/200 34/34 
[==============================] - 0s 4ms/step - loss: 0.7437 - accuracy: 0.6648 - val_loss: 1.0218 - val_accuracy: 0.6176 Epoch 44/200 34/34 [==============================] - 0s 4ms/step - loss: 0.7517 - accuracy: 0.6931 - val_loss: 1.0212 - val_accuracy: 0.6140 Epoch 45/200 34/34 [==============================] - 0s 4ms/step - loss: 0.7691 - accuracy: 0.6828 - val_loss: 1.0183 - val_accuracy: 0.6213 Epoch 46/200 34/34 [==============================] - 0s 3ms/step - loss: 0.7616 - accuracy: 0.6826 - val_loss: 1.0204 - val_accuracy: 0.6140 Epoch 47/200 34/34 [==============================] - 0s 5ms/step - loss: 0.7777 - accuracy: 0.6776 - val_loss: 1.0305 - val_accuracy: 0.6250 Epoch 48/200 34/34 [==============================] - 0s 3ms/step - loss: 0.8014 - accuracy: 0.6546 - val_loss: 1.0331 - val_accuracy: 0.6176 Epoch 49/200 34/34 [==============================] - 0s 3ms/step - loss: 0.7336 - accuracy: 0.7015 - val_loss: 1.0342 - val_accuracy: 0.6360 Epoch 50/200 34/34 [==============================] - 0s 3ms/step - loss: 0.7578 - accuracy: 0.6883 - val_loss: 1.0310 - val_accuracy: 0.6213 Epoch 51/200 34/34 [==============================] - 0s 4ms/step - loss: 0.7314 - accuracy: 0.6966 - val_loss: 1.0290 - val_accuracy: 0.6213 Epoch 52/200 34/34 [==============================] - 0s 4ms/step - loss: 0.7434 - accuracy: 0.6961 - val_loss: 1.0289 - val_accuracy: 0.6250 Epoch 53/200 34/34 [==============================] - 0s 4ms/step - loss: 0.7629 - accuracy: 0.6763 - val_loss: 1.0283 - val_accuracy: 0.6324 Epoch 54/200 34/34 [==============================] - 0s 4ms/step - loss: 0.7697 - accuracy: 0.6614 - val_loss: 1.0292 - val_accuracy: 0.6360 Epoch 55/200 34/34 [==============================] - 0s 4ms/step - loss: 0.7567 - accuracy: 0.6909 - val_loss: 1.0257 - val_accuracy: 0.6250 Epoch 56/200 34/34 [==============================] - 0s 4ms/step - loss: 0.7710 - accuracy: 0.6839 - val_loss: 1.0277 - val_accuracy: 0.6324 Epoch 57/200 34/34 
[==============================] - 0s 4ms/step - loss: 0.7425 - accuracy: 0.6893 - val_loss: 1.0357 - val_accuracy: 0.6213 Epoch 58/200 34/34 [==============================] - 0s 4ms/step - loss: 0.7495 - accuracy: 0.6780 - val_loss: 1.0369 - val_accuracy: 0.6176 Epoch 59/200 34/34 [==============================] - 0s 4ms/step - loss: 0.7724 - accuracy: 0.6504 - val_loss: 1.0398 - val_accuracy: 0.6176 Epoch 60/200 34/34 [==============================] - 0s 4ms/step - loss: 0.8079 - accuracy: 0.6539 - val_loss: 1.0546 - val_accuracy: 0.6176 Epoch 61/200 34/34 [==============================] - 0s 4ms/step - loss: 0.7222 - accuracy: 0.6936 - val_loss: 1.0456 - val_accuracy: 0.6213 Epoch 62/200 34/34 [==============================] - 0s 3ms/step - loss: 0.7445 - accuracy: 0.6843 - val_loss: 1.0499 - val_accuracy: 0.6066 Epoch 63/200 34/34 [==============================] - 0s 3ms/step - loss: 0.7525 - accuracy: 0.6753 - val_loss: 1.0525 - val_accuracy: 0.6213 Epoch 64/200 34/34 [==============================] - 0s 3ms/step - loss: 0.7311 - accuracy: 0.6991 - val_loss: 1.0542 - val_accuracy: 0.6029 Epoch 65/200 34/34 [==============================] - 0s 4ms/step - loss: 0.7149 - accuracy: 0.6817 - val_loss: 1.0620 - val_accuracy: 0.6213 Epoch 66/200 34/34 [==============================] - 0s 4ms/step - loss: 0.7326 - accuracy: 0.6768 - val_loss: 1.0623 - val_accuracy: 0.6213 Epoch 67/200 34/34 [==============================] - 0s 4ms/step - loss: 0.7047 - accuracy: 0.6789 - val_loss: 1.0522 - val_accuracy: 0.6103 Epoch 68/200 34/34 [==============================] - 0s 4ms/step - loss: 0.7822 - accuracy: 0.6712 - val_loss: 1.0425 - val_accuracy: 0.6140 Epoch 69/200 34/34 [==============================] - 0s 4ms/step - loss: 0.7483 - accuracy: 0.6891 - val_loss: 1.0553 - val_accuracy: 0.6140 Epoch 70/200 34/34 [==============================] - 0s 4ms/step - loss: 0.7270 - accuracy: 0.6762 - val_loss: 1.0532 - val_accuracy: 0.6140 Epoch 71/200 34/34 
[==============================] - 0s 4ms/step - loss: 0.7570 - accuracy: 0.6754 - val_loss: 1.0631 - val_accuracy: 0.6140 Epoch 72/200 34/34 [==============================] - 0s 3ms/step - loss: 0.7566 - accuracy: 0.6883 - val_loss: 1.0561 - val_accuracy: 0.6140 Epoch 73/200 34/34 [==============================] - 0s 4ms/step - loss: 0.7232 - accuracy: 0.6912 - val_loss: 1.0615 - val_accuracy: 0.6103 Epoch 74/200 34/34 [==============================] - 0s 4ms/step - loss: 0.7284 - accuracy: 0.6809 - val_loss: 1.0659 - val_accuracy: 0.6066 Epoch 75/200 34/34 [==============================] - 0s 4ms/step - loss: 0.7360 - accuracy: 0.6813 - val_loss: 1.0633 - val_accuracy: 0.5993 Epoch 76/200 34/34 [==============================] - 0s 4ms/step - loss: 0.7251 - accuracy: 0.6753 - val_loss: 1.0697 - val_accuracy: 0.6066 Epoch 77/200 34/34 [==============================] - 0s 4ms/step - loss: 0.7555 - accuracy: 0.6803 - val_loss: 1.0625 - val_accuracy: 0.5956 Epoch 78/200 34/34 [==============================] - 0s 4ms/step - loss: 0.7047 - accuracy: 0.6999 - val_loss: 1.0699 - val_accuracy: 0.5919 Epoch 79/200 34/34 [==============================] - 0s 3ms/step - loss: 0.7034 - accuracy: 0.6859 - val_loss: 1.0689 - val_accuracy: 0.5956 Epoch 80/200 34/34 [==============================] - 0s 4ms/step - loss: 0.7086 - accuracy: 0.7087 - val_loss: 1.0717 - val_accuracy: 0.6029 Epoch 81/200 34/34 [==============================] - 0s 4ms/step - loss: 0.6999 - accuracy: 0.6873 - val_loss: 1.0790 - val_accuracy: 0.6103 Epoch 82/200 34/34 [==============================] - 0s 4ms/step - loss: 0.6841 - accuracy: 0.7295 - val_loss: 1.0885 - val_accuracy: 0.6066 Epoch 83/200 34/34 [==============================] - 0s 3ms/step - loss: 0.7396 - accuracy: 0.6771 - val_loss: 1.0938 - val_accuracy: 0.6140 Epoch 84/200 34/34 [==============================] - 0s 3ms/step - loss: 0.7183 - accuracy: 0.6826 - val_loss: 1.0989 - val_accuracy: 0.6066 Epoch 85/200 34/34 
[==============================] - 0s 4ms/step - loss: 0.7172 - accuracy: 0.6853 - val_loss: 1.1001 - val_accuracy: 0.6140 Epoch 86/200 34/34 [==============================] - 0s 4ms/step - loss: 0.6978 - accuracy: 0.6963 - val_loss: 1.1053 - val_accuracy: 0.6066 Epoch 87/200 34/34 [==============================] - 0s 3ms/step - loss: 0.6878 - accuracy: 0.7061 - val_loss: 1.1092 - val_accuracy: 0.6029 Epoch 88/200 34/34 [==============================] - 0s 4ms/step - loss: 0.6671 - accuracy: 0.7235 - val_loss: 1.1112 - val_accuracy: 0.5882 Epoch 89/200 34/34 [==============================] - 0s 4ms/step - loss: 0.7179 - accuracy: 0.6933 - val_loss: 1.1043 - val_accuracy: 0.5919 Epoch 90/200 34/34 [==============================] - 0s 5ms/step - loss: 0.6990 - accuracy: 0.7025 - val_loss: 1.1224 - val_accuracy: 0.5919 Epoch 91/200 34/34 [==============================] - 0s 4ms/step - loss: 0.6648 - accuracy: 0.7080 - val_loss: 1.1190 - val_accuracy: 0.5846 Epoch 92/200 34/34 [==============================] - 0s 4ms/step - loss: 0.6819 - accuracy: 0.6902 - val_loss: 1.1234 - val_accuracy: 0.5993 Epoch 93/200 34/34 [==============================] - 0s 4ms/step - loss: 0.6497 - accuracy: 0.7287 - val_loss: 1.1270 - val_accuracy: 0.5846 Epoch 94/200 34/34 [==============================] - 0s 4ms/step - loss: 0.6724 - accuracy: 0.7272 - val_loss: 1.1265 - val_accuracy: 0.5846 Epoch 95/200 34/34 [==============================] - 0s 4ms/step - loss: 0.6407 - accuracy: 0.7485 - val_loss: 1.1208 - val_accuracy: 0.5919 Epoch 96/200 34/34 [==============================] - 0s 4ms/step - loss: 0.7382 - accuracy: 0.6756 - val_loss: 1.1264 - val_accuracy: 0.5882 Epoch 97/200 34/34 [==============================] - 0s 4ms/step - loss: 0.7015 - accuracy: 0.6903 - val_loss: 1.1311 - val_accuracy: 0.5993 Epoch 98/200 34/34 [==============================] - 0s 4ms/step - loss: 0.6860 - accuracy: 0.7164 - val_loss: 1.1263 - val_accuracy: 0.6066 Epoch 99/200 34/34 
[==============================] - 0s 3ms/step - loss: 0.6779 - accuracy: 0.7073 - val_loss: 1.1244 - val_accuracy: 0.5956 Epoch 100/200 34/34 [==============================] - 0s 4ms/step - loss: 0.6384 - accuracy: 0.7465 - val_loss: 1.1269 - val_accuracy: 0.6103 Epoch 101/200 34/34 [==============================] - 0s 4ms/step - loss: 0.6425 - accuracy: 0.7299 - val_loss: 1.1273 - val_accuracy: 0.6250 Epoch 102/200 34/34 [==============================] - 0s 4ms/step - loss: 0.6441 - accuracy: 0.7225 - val_loss: 1.1313 - val_accuracy: 0.5919 Epoch 103/200 34/34 [==============================] - 0s 4ms/step - loss: 0.6539 - accuracy: 0.7269 - val_loss: 1.1458 - val_accuracy: 0.5956 Epoch 104/200 34/34 [==============================] - 0s 4ms/step - loss: 0.6523 - accuracy: 0.7320 - val_loss: 1.1433 - val_accuracy: 0.5882 Epoch 105/200 34/34 [==============================] - 0s 5ms/step - loss: 0.6824 - accuracy: 0.7123 - val_loss: 1.1394 - val_accuracy: 0.5882 Epoch 106/200 34/34 [==============================] - 0s 5ms/step - loss: 0.6319 - accuracy: 0.7281 - val_loss: 1.1460 - val_accuracy: 0.5919 Epoch 107/200 34/34 [==============================] - 0s 4ms/step - loss: 0.6802 - accuracy: 0.7129 - val_loss: 1.1391 - val_accuracy: 0.5993 Epoch 108/200 34/34 [==============================] - 0s 4ms/step - loss: 0.6909 - accuracy: 0.7101 - val_loss: 1.1365 - val_accuracy: 0.5882 Epoch 109/200 34/34 [==============================] - 0s 4ms/step - loss: 0.6438 - accuracy: 0.7384 - val_loss: 1.1461 - val_accuracy: 0.5846 Epoch 110/200 34/34 [==============================] - 0s 5ms/step - loss: 0.6227 - accuracy: 0.7475 - val_loss: 1.1528 - val_accuracy: 0.6066 Epoch 111/200 34/34 [==============================] - 0s 4ms/step - loss: 0.6608 - accuracy: 0.7182 - val_loss: 1.1523 - val_accuracy: 0.6176 Epoch 112/200 34/34 [==============================] - 0s 4ms/step - loss: 0.6634 - accuracy: 0.7235 - val_loss: 1.1532 - val_accuracy: 0.6066 Epoch 113/200 
34/34 [==============================] - 0s 4ms/step - loss: 0.6691 - accuracy: 0.7142 - val_loss: 1.1492 - val_accuracy: 0.5956 Epoch 114/200 34/34 [==============================] - 0s 4ms/step - loss: 0.6183 - accuracy: 0.7464 - val_loss: 1.1571 - val_accuracy: 0.5956 Epoch 115/200 34/34 [==============================] - 0s 4ms/step - loss: 0.6605 - accuracy: 0.7134 - val_loss: 1.1612 - val_accuracy: 0.6029 Epoch 116/200 34/34 [==============================] - 0s 4ms/step - loss: 0.6572 - accuracy: 0.7159 - val_loss: 1.1689 - val_accuracy: 0.5919 Epoch 117/200 34/34 [==============================] - 0s 3ms/step - loss: 0.6755 - accuracy: 0.7062 - val_loss: 1.1636 - val_accuracy: 0.5882 Epoch 118/200 34/34 [==============================] - 0s 4ms/step - loss: 0.6161 - accuracy: 0.7463 - val_loss: 1.1612 - val_accuracy: 0.6029 Epoch 119/200 34/34 [==============================] - 0s 4ms/step - loss: 0.6324 - accuracy: 0.7393 - val_loss: 1.1658 - val_accuracy: 0.5772 Epoch 120/200 34/34 [==============================] - 0s 4ms/step - loss: 0.6117 - accuracy: 0.7481 - val_loss: 1.1625 - val_accuracy: 0.5772 Epoch 121/200 34/34 [==============================] - 0s 4ms/step - loss: 0.6267 - accuracy: 0.7355 - val_loss: 1.1750 - val_accuracy: 0.5882 Epoch 122/200 34/34 [==============================] - 0s 3ms/step - loss: 0.6406 - accuracy: 0.7277 - val_loss: 1.1724 - val_accuracy: 0.5809 Epoch 123/200 34/34 [==============================] - 0s 4ms/step - loss: 0.5921 - accuracy: 0.7503 - val_loss: 1.1636 - val_accuracy: 0.5772 Epoch 124/200 34/34 [==============================] - 0s 4ms/step - loss: 0.6219 - accuracy: 0.7356 - val_loss: 1.1629 - val_accuracy: 0.5772 Epoch 125/200 34/34 [==============================] - 0s 4ms/step - loss: 0.5980 - accuracy: 0.7500 - val_loss: 1.1869 - val_accuracy: 0.5772 Epoch 126/200 34/34 [==============================] - 0s 4ms/step - loss: 0.6359 - accuracy: 0.7294 - val_loss: 1.1690 - val_accuracy: 0.5735 Epoch 
127/200 34/34 [==============================] - 0s 4ms/step - loss: 0.6648 - accuracy: 0.7078 - val_loss: 1.1774 - val_accuracy: 0.5809 Epoch 128/200 34/34 [==============================] - 0s 4ms/step - loss: 0.6154 - accuracy: 0.7354 - val_loss: 1.1901 - val_accuracy: 0.5919 Epoch 129/200 34/34 [==============================] - 0s 4ms/step - loss: 0.6202 - accuracy: 0.7450 - val_loss: 1.1814 - val_accuracy: 0.5919 Epoch 130/200 34/34 [==============================] - 0s 4ms/step - loss: 0.6110 - accuracy: 0.7656 - val_loss: 1.1692 - val_accuracy: 0.5772 Epoch 131/200 34/34 [==============================] - 0s 3ms/step - loss: 0.5817 - accuracy: 0.7545 - val_loss: 1.1835 - val_accuracy: 0.5772 Epoch 132/200 34/34 [==============================] - 0s 4ms/step - loss: 0.5929 - accuracy: 0.7352 - val_loss: 1.1861 - val_accuracy: 0.5735 Epoch 133/200 34/34 [==============================] - 0s 4ms/step - loss: 0.6059 - accuracy: 0.7363 - val_loss: 1.1838 - val_accuracy: 0.5882 Epoch 134/200 34/34 [==============================] - 0s 4ms/step - loss: 0.5692 - accuracy: 0.7671 - val_loss: 1.2018 - val_accuracy: 0.5772 Epoch 135/200 34/34 [==============================] - 0s 4ms/step - loss: 0.5802 - accuracy: 0.7445 - val_loss: 1.2014 - val_accuracy: 0.5919 Epoch 136/200 34/34 [==============================] - 0s 4ms/step - loss: 0.5774 - accuracy: 0.7614 - val_loss: 1.2001 - val_accuracy: 0.5919 Epoch 137/200 34/34 [==============================] - 0s 4ms/step - loss: 0.6030 - accuracy: 0.7394 - val_loss: 1.2133 - val_accuracy: 0.5735 Epoch 138/200 34/34 [==============================] - 0s 4ms/step - loss: 0.5884 - accuracy: 0.7573 - val_loss: 1.2153 - val_accuracy: 0.5882 Epoch 139/200 34/34 [==============================] - 0s 4ms/step - loss: 0.6145 - accuracy: 0.7194 - val_loss: 1.2114 - val_accuracy: 0.5846 Epoch 140/200 34/34 [==============================] - 0s 4ms/step - loss: 0.6025 - accuracy: 0.7472 - val_loss: 1.2378 - val_accuracy: 0.5772 
Epoch 141/200 34/34 [==============================] - 0s 4ms/step - loss: 0.5855 - accuracy: 0.7647 - val_loss: 1.2256 - val_accuracy: 0.5846 Epoch 142/200 34/34 [==============================] - 0s 4ms/step - loss: 0.6097 - accuracy: 0.7304 - val_loss: 1.2376 - val_accuracy: 0.5625 Epoch 143/200 34/34 [==============================] - 0s 4ms/step - loss: 0.6408 - accuracy: 0.7349 - val_loss: 1.2349 - val_accuracy: 0.5699 Epoch 144/200 34/34 [==============================] - 0s 4ms/step - loss: 0.5683 - accuracy: 0.7746 - val_loss: 1.2376 - val_accuracy: 0.5662 Epoch 145/200 34/34 [==============================] - 0s 4ms/step - loss: 0.6420 - accuracy: 0.7181 - val_loss: 1.2394 - val_accuracy: 0.5699 Epoch 146/200 34/34 [==============================] - 0s 4ms/step - loss: 0.5661 - accuracy: 0.7630 - val_loss: 1.2420 - val_accuracy: 0.5735 Epoch 147/200 34/34 [==============================] - 0s 3ms/step - loss: 0.5874 - accuracy: 0.7597 - val_loss: 1.2543 - val_accuracy: 0.5588 Epoch 148/200 34/34 [==============================] - 0s 4ms/step - loss: 0.5537 - accuracy: 0.7843 - val_loss: 1.2432 - val_accuracy: 0.5699 Epoch 149/200 34/34 [==============================] - 0s 4ms/step - loss: 0.5632 - accuracy: 0.7433 - val_loss: 1.2355 - val_accuracy: 0.5625 Epoch 150/200 34/34 [==============================] - 0s 4ms/step - loss: 0.5557 - accuracy: 0.7693 - val_loss: 1.2295 - val_accuracy: 0.5956 Epoch 151/200 34/34 [==============================] - 0s 4ms/step - loss: 0.5618 - accuracy: 0.7728 - val_loss: 1.2325 - val_accuracy: 0.5772 Epoch 152/200 34/34 [==============================] - 0s 4ms/step - loss: 0.6001 - accuracy: 0.7378 - val_loss: 1.2138 - val_accuracy: 0.5846 Epoch 153/200 34/34 [==============================] - 0s 4ms/step - loss: 0.5484 - accuracy: 0.7630 - val_loss: 1.2371 - val_accuracy: 0.5772 Epoch 154/200 34/34 [==============================] - 0s 4ms/step - loss: 0.5738 - accuracy: 0.7740 - val_loss: 1.2302 - val_accuracy: 
0.5735 Epoch 155/200 34/34 [==============================] - 0s 4ms/step - loss: 0.6033 - accuracy: 0.7600 - val_loss: 1.2336 - val_accuracy: 0.5809 Epoch 156/200 34/34 [==============================] - 0s 4ms/step - loss: 0.5910 - accuracy: 0.7390 - val_loss: 1.2457 - val_accuracy: 0.5993 Epoch 157/200 34/34 [==============================] - 0s 4ms/step - loss: 0.5468 - accuracy: 0.7763 - val_loss: 1.2684 - val_accuracy: 0.5735 Epoch 158/200 34/34 [==============================] - 0s 4ms/step - loss: 0.5701 - accuracy: 0.7558 - val_loss: 1.2515 - val_accuracy: 0.5919 Epoch 159/200 34/34 [==============================] - 0s 4ms/step - loss: 0.5752 - accuracy: 0.7667 - val_loss: 1.2732 - val_accuracy: 0.5699 Epoch 160/200 34/34 [==============================] - 0s 4ms/step - loss: 0.5575 - accuracy: 0.7838 - val_loss: 1.2624 - val_accuracy: 0.5735 Epoch 161/200 34/34 [==============================] - 0s 4ms/step - loss: 0.6205 - accuracy: 0.7306 - val_loss: 1.2640 - val_accuracy: 0.5772 Epoch 162/200 34/34 [==============================] - 0s 4ms/step - loss: 0.5771 - accuracy: 0.7741 - val_loss: 1.2441 - val_accuracy: 0.5846 Epoch 163/200 34/34 [==============================] - 0s 4ms/step - loss: 0.5715 - accuracy: 0.7545 - val_loss: 1.2553 - val_accuracy: 0.5735 Epoch 164/200 34/34 [==============================] - 0s 3ms/step - loss: 0.5772 - accuracy: 0.7690 - val_loss: 1.2430 - val_accuracy: 0.5882 Epoch 165/200 34/34 [==============================] - 0s 4ms/step - loss: 0.5333 - accuracy: 0.7775 - val_loss: 1.2492 - val_accuracy: 0.5625 Epoch 166/200 34/34 [==============================] - 0s 4ms/step - loss: 0.5590 - accuracy: 0.7636 - val_loss: 1.2398 - val_accuracy: 0.5772 Epoch 167/200 34/34 [==============================] - 0s 4ms/step - loss: 0.5670 - accuracy: 0.7602 - val_loss: 1.2724 - val_accuracy: 0.5846 Epoch 168/200 34/34 [==============================] - 0s 4ms/step - loss: 0.5694 - accuracy: 0.7592 - val_loss: 1.2557 - 
val_accuracy: 0.5809 Epoch 169/200 34/34 [==============================] - 0s 4ms/step - loss: 0.5591 - accuracy: 0.7548 - val_loss: 1.2646 - val_accuracy: 0.5882 Epoch 170/200 34/34 [==============================] - 0s 4ms/step - loss: 0.5481 - accuracy: 0.7750 - val_loss: 1.2566 - val_accuracy: 0.5735 Epoch 171/200 34/34 [==============================] - 0s 4ms/step - loss: 0.5428 - accuracy: 0.7828 - val_loss: 1.2433 - val_accuracy: 0.5882 Epoch 172/200 34/34 [==============================] - 0s 4ms/step - loss: 0.5323 - accuracy: 0.7851 - val_loss: 1.2731 - val_accuracy: 0.5772 Epoch 173/200 34/34 [==============================] - 0s 4ms/step - loss: 0.5671 - accuracy: 0.7535 - val_loss: 1.2875 - val_accuracy: 0.5809 Epoch 174/200 34/34 [==============================] - 0s 4ms/step - loss: 0.5802 - accuracy: 0.7592 - val_loss: 1.2837 - val_accuracy: 0.5699 Epoch 175/200 34/34 [==============================] - 0s 4ms/step - loss: 0.5460 - accuracy: 0.7750 - val_loss: 1.2970 - val_accuracy: 0.5478 Epoch 176/200 34/34 [==============================] - 0s 5ms/step - loss: 0.5834 - accuracy: 0.7589 - val_loss: 1.2739 - val_accuracy: 0.5625 Epoch 177/200 34/34 [==============================] - 0s 4ms/step - loss: 0.5316 - accuracy: 0.7856 - val_loss: 1.2877 - val_accuracy: 0.5735 Epoch 178/200 34/34 [==============================] - 0s 5ms/step - loss: 0.5754 - accuracy: 0.7716 - val_loss: 1.2731 - val_accuracy: 0.5662 Epoch 179/200 34/34 [==============================] - 0s 4ms/step - loss: 0.5199 - accuracy: 0.7989 - val_loss: 1.2825 - val_accuracy: 0.5735 Epoch 180/200 34/34 [==============================] - 0s 4ms/step - loss: 0.5675 - accuracy: 0.7595 - val_loss: 1.2756 - val_accuracy: 0.5772 Epoch 181/200 34/34 [==============================] - 0s 4ms/step - loss: 0.5789 - accuracy: 0.7505 - val_loss: 1.2995 - val_accuracy: 0.5588 Epoch 182/200 34/34 [==============================] - 0s 5ms/step - loss: 0.5293 - accuracy: 0.7653 - val_loss: 1.3069 
- val_accuracy: 0.5772 Epoch 183/200 34/34 [==============================] - 0s 8ms/step - loss: 0.5836 - accuracy: 0.7564 - val_loss: 1.2867 - val_accuracy: 0.5625 Epoch 184/200 34/34 [==============================] - 0s 5ms/step - loss: 0.5664 - accuracy: 0.7711 - val_loss: 1.2922 - val_accuracy: 0.5772 Epoch 185/200 34/34 [==============================] - 0s 4ms/step - loss: 0.5769 - accuracy: 0.7578 - val_loss: 1.3018 - val_accuracy: 0.5735 Epoch 186/200 34/34 [==============================] - 0s 7ms/step - loss: 0.5758 - accuracy: 0.7481 - val_loss: 1.2974 - val_accuracy: 0.5735 Epoch 187/200 34/34 [==============================] - 0s 6ms/step - loss: 0.5560 - accuracy: 0.7666 - val_loss: 1.2930 - val_accuracy: 0.5772 Epoch 188/200 34/34 [==============================] - 0s 5ms/step - loss: 0.5342 - accuracy: 0.7999 - val_loss: 1.2826 - val_accuracy: 0.5699 Epoch 189/200 34/34 [==============================] - 0s 6ms/step - loss: 0.5550 - accuracy: 0.7612 - val_loss: 1.3142 - val_accuracy: 0.5735 Epoch 190/200 34/34 [==============================] - 0s 6ms/step - loss: 0.5355 - accuracy: 0.7680 - val_loss: 1.3007 - val_accuracy: 0.5699 Epoch 191/200 34/34 [==============================] - 0s 6ms/step - loss: 0.5114 - accuracy: 0.7950 - val_loss: 1.3034 - val_accuracy: 0.5588 Epoch 192/200 34/34 [==============================] - 0s 6ms/step - loss: 0.5545 - accuracy: 0.7616 - val_loss: 1.2889 - val_accuracy: 0.5772 Epoch 193/200 34/34 [==============================] - 0s 6ms/step - loss: 0.5489 - accuracy: 0.7659 - val_loss: 1.3045 - val_accuracy: 0.5588 Epoch 194/200 34/34 [==============================] - 0s 5ms/step - loss: 0.5773 - accuracy: 0.7691 - val_loss: 1.3025 - val_accuracy: 0.5625 Epoch 195/200 34/34 [==============================] - 0s 5ms/step - loss: 0.5286 - accuracy: 0.7820 - val_loss: 1.2897 - val_accuracy: 0.5809 Epoch 196/200 34/34 [==============================] - 0s 5ms/step - loss: 0.5097 - accuracy: 0.7868 - val_loss: 
1.3052 - val_accuracy: 0.5735 Epoch 197/200 34/34 [==============================] - 0s 5ms/step - loss: 0.5658 - accuracy: 0.7722 - val_loss: 1.3083 - val_accuracy: 0.5699 Epoch 198/200 34/34 [==============================] - 0s 4ms/step - loss: 0.5626 - accuracy: 0.7749 - val_loss: 1.3209 - val_accuracy: 0.5735 Epoch 199/200 34/34 [==============================] - 0s 4ms/step - loss: 0.5292 - accuracy: 0.7760 - val_loss: 1.3327 - val_accuracy: 0.5772 Epoch 200/200 34/34 [==============================] - 0s 4ms/step - loss: 0.5294 - accuracy: 0.7737 - val_loss: 1.3015 - val_accuracy: 0.5772
<tensorflow.python.keras.callbacks.History at 0x146a0b8e0>
# Evaluate the trained classifier on the training split and report its metrics.
print(colored('\x1B[1m Evaluating model with train dataset','blue'))
loss,accu=model.evaluate(X_train, Y_train)
# BUG FIX: label typo "tTrain Loss" -> "Train Loss".
print(colored('\x1B[1mTrain Loss:','red'),(loss))
print(colored('\x1B[1mTrain Accuracy:','green'),(accu))
Evaluating model with train dataset 34/34 [==============================] - 0s 3ms/step - loss: 0.2963 - accuracy: 0.9217 Test Loss: 0.2963239550590515 Test Accuracy: 0.921658992767334
# Held-out performance of the hand-tuned model: loss and accuracy on the test split.
print(colored('\x1B[1m Evaluating model with test dataset', 'blue'))
loss, accu = model.evaluate(X_test, Y_test)
for label, value, colour in (('\x1B[1mTest Loss:', loss, 'red'),
                             ('\x1B[1mTest Accuracy:', accu, 'green')):
    print(colored(label, colour), value)
Evaluating model with test dataset 9/9 [==============================] - 0s 2ms/step - loss: 1.3015 - accuracy: 0.5772 Test Loss: 1.301488995552063 Test Accuracy: 0.5772058963775635
# Announce the tuning stage. BUG FIX: typo "hperparameters" -> "hyperparameters".
print(colored('\x1B[1m Tuning hyperparameters to get best parameters','blue'))
def model_builder(hp):
    """Build and compile a tunable Keras classifier for Keras Tuner.

    Parameters
    ----------
    hp : kerastuner.HyperParameters
        Search-space handle supplied by the tuner on each trial.

    Returns
    -------
    keras.Sequential
        Compiled model whose first hidden width, learning rate, decay,
        momentum and output-layer L2 strength are searched by the tuner.
    """
    model = keras.Sequential()
    # Search space (duplicate 2e-4 entry removed from the original list).
    hp_learning_rate = hp.Choice('learning_rate', values=[1e-2, 1e-3, 1e-4, 2e-2, 2e-4])
    d = hp.Choice('decay', values=[1e-4, 1e-6, 1e-8])
    m = hp.Choice('momentum', values=[.7, .9, .5])
    Lambda = hp.Choice('Lambda', values=[1e-2, 1e-4, 1e-6])
    hp_units = hp.Int('units', min_value=32, max_value=512, step=32)
    model.add(keras.layers.Flatten(input_shape=(11,)))  # 11 input parameters
    # Redundant Activation('relu') after this already-relu Dense removed:
    # relu(relu(x)) == relu(x), so behaviour is unchanged.
    model.add(keras.layers.Dense(units=hp_units, activation='relu'))
    model.add(Dense(40, activation='relu'))
    model.add(Dense(60, activation='relu'))
    model.add(Dropout(.02))
    model.add(Dense(45, activation='relu'))
    model.add(Dense(15, activation='relu'))
    model.add(Dropout(.02))
    # 9-way softmax head with L2 regularisation on its kernel.
    model.add(Dense(9, activation='softmax', kernel_regularizer=regularizers.l2(Lambda)))
    # BUG FIX: the original compiled with the *global* `lr` instead of the
    # tuned `hp_learning_rate`, making the learning-rate search a no-op.
    model.compile(optimizer=keras.optimizers.SGD(learning_rate=hp_learning_rate,
                                                 momentum=m, decay=d),
                  loss='categorical_crossentropy', metrics=['accuracy'])
    return model
Tuning hperparameters to get best parameters
# Hyperband tuner maximising validation accuracy; state is persisted under
# my_dir/intro_to_kt so interrupted searches resume instead of restarting.
hyperband_opts = dict(
    objective='val_accuracy',
    max_epochs=10,
    factor=3,
    directory='my_dir',
    project_name='intro_to_kt',
)
tuner = kt.Hyperband(model_builder, **hyperband_opts)
INFO:tensorflow:Reloading Oracle from existing project my_dir/intro_to_kt/oracle.json INFO:tensorflow:Reloading Tuner from my_dir/intro_to_kt/tuner0.json
# Abort a trial once validation accuracy stops improving for 5 epochs.
early_stop_cb = tf.keras.callbacks.EarlyStopping(monitor='val_accuracy', patience=5)
tuner.search(
    X_train,
    Y_train,
    epochs=100,
    validation_split=0.3,
    callbacks=[early_stop_cb],
)
# Keep only the single best hyperparameter configuration found.
best_hps = tuner.get_best_hyperparameters(num_trials=1)[0]
INFO:tensorflow:Oracle triggered exit
# Report the winning hyperparameters (output text identical to the original).
summary = (
    "\nThe hyperparameter search is complete. The optimal number of units in the first densely-connected"
    f"\nlayer is {best_hps.get('units')} and the optimal learning rate for the optimizer"
    f"\nis {best_hps.get('learning_rate')}.\n"
)
print(summary)
The hyperparameter search is complete. The optimal number of units in the first densely-connected layer is 256 and the optimal learning rate for the optimizer is 0.01.
print(colored('\x1B[1m Rebuilding and Refitting the Model with best parameters','blue'))
# Fresh (untrained) model instantiated from the winning hyperparameters.
model = tuner.hypermodel.build(best_hps)
Rebuilding and Refitting the Model with best parameters
# Train for up to 50 epochs, then pick the epoch with the highest validation
# accuracy (1-based, matching Keras epoch numbering).
history = model.fit(X_train, Y_train, epochs=50, validation_split=0.3)
val_accs = history.history['val_accuracy']
# max() returns the first maximal element, matching list.index(max(...)).
best_epoch, _best_acc = max(enumerate(val_accs, start=1), key=lambda pair: pair[1])
print('Best epoch: %d' % (best_epoch,))
Epoch 1/50 24/24 [==============================] - 1s 20ms/step - loss: 2.1867 - accuracy: 0.2418 - val_loss: 1.7407 - val_accuracy: 0.3712 Epoch 2/50 24/24 [==============================] - 0s 6ms/step - loss: 1.5833 - accuracy: 0.4269 - val_loss: 1.2772 - val_accuracy: 0.5767 Epoch 3/50 24/24 [==============================] - 0s 7ms/step - loss: 1.2504 - accuracy: 0.5619 - val_loss: 1.1897 - val_accuracy: 0.5706 Epoch 4/50 24/24 [==============================] - 0s 8ms/step - loss: 1.1503 - accuracy: 0.6065 - val_loss: 1.1498 - val_accuracy: 0.5583 Epoch 5/50 24/24 [==============================] - 0s 9ms/step - loss: 1.1096 - accuracy: 0.6153 - val_loss: 1.0900 - val_accuracy: 0.5706 Epoch 6/50 24/24 [==============================] - 0s 4ms/step - loss: 1.1181 - accuracy: 0.5824 - val_loss: 1.0469 - val_accuracy: 0.5644 Epoch 7/50 24/24 [==============================] - 0s 4ms/step - loss: 1.1032 - accuracy: 0.5580 - val_loss: 1.0234 - val_accuracy: 0.5736 Epoch 8/50 24/24 [==============================] - 0s 5ms/step - loss: 1.1205 - accuracy: 0.5399 - val_loss: 1.0788 - val_accuracy: 0.5399 Epoch 9/50 24/24 [==============================] - 0s 5ms/step - loss: 1.0478 - accuracy: 0.5876 - val_loss: 1.0049 - val_accuracy: 0.5920 Epoch 10/50 24/24 [==============================] - 0s 5ms/step - loss: 1.0083 - accuracy: 0.6237 - val_loss: 1.0044 - val_accuracy: 0.6043 Epoch 11/50 24/24 [==============================] - 0s 6ms/step - loss: 0.9867 - accuracy: 0.6104 - val_loss: 1.0092 - val_accuracy: 0.5920 Epoch 12/50 24/24 [==============================] - 0s 5ms/step - loss: 1.0083 - accuracy: 0.5964 - val_loss: 1.0108 - val_accuracy: 0.5920 Epoch 13/50 24/24 [==============================] - 0s 4ms/step - loss: 1.0062 - accuracy: 0.6015 - val_loss: 1.0082 - val_accuracy: 0.5798 Epoch 14/50 24/24 [==============================] - 0s 4ms/step - loss: 1.0215 - accuracy: 0.6090 - val_loss: 1.0128 - val_accuracy: 0.5828 Epoch 15/50 24/24 
[==============================] - 0s 5ms/step - loss: 0.9738 - accuracy: 0.6414 - val_loss: 0.9820 - val_accuracy: 0.6043 Epoch 16/50 24/24 [==============================] - 0s 5ms/step - loss: 0.9240 - accuracy: 0.6399 - val_loss: 1.0078 - val_accuracy: 0.6196 Epoch 17/50 24/24 [==============================] - 0s 4ms/step - loss: 0.9459 - accuracy: 0.6432 - val_loss: 0.9768 - val_accuracy: 0.6074 Epoch 18/50 24/24 [==============================] - 0s 4ms/step - loss: 0.9479 - accuracy: 0.6416 - val_loss: 0.9891 - val_accuracy: 0.5920 Epoch 19/50 24/24 [==============================] - 0s 4ms/step - loss: 0.9178 - accuracy: 0.6580 - val_loss: 0.9903 - val_accuracy: 0.6012 Epoch 20/50 24/24 [==============================] - 0s 4ms/step - loss: 0.8886 - accuracy: 0.6691 - val_loss: 0.9833 - val_accuracy: 0.6166 Epoch 21/50 24/24 [==============================] - 0s 4ms/step - loss: 0.9265 - accuracy: 0.6599 - val_loss: 0.9894 - val_accuracy: 0.6196 Epoch 22/50 24/24 [==============================] - 0s 4ms/step - loss: 0.8816 - accuracy: 0.6681 - val_loss: 1.0849 - val_accuracy: 0.5521 Epoch 23/50 24/24 [==============================] - 0s 4ms/step - loss: 0.9489 - accuracy: 0.6366 - val_loss: 1.1040 - val_accuracy: 0.5552 Epoch 24/50 24/24 [==============================] - 0s 4ms/step - loss: 0.9220 - accuracy: 0.6373 - val_loss: 0.9793 - val_accuracy: 0.6135 Epoch 25/50 24/24 [==============================] - 0s 5ms/step - loss: 0.8725 - accuracy: 0.6950 - val_loss: 0.9991 - val_accuracy: 0.6135 Epoch 26/50 24/24 [==============================] - 0s 4ms/step - loss: 0.8582 - accuracy: 0.6759 - val_loss: 0.9722 - val_accuracy: 0.6074 Epoch 27/50 24/24 [==============================] - 0s 4ms/step - loss: 0.8942 - accuracy: 0.6685 - val_loss: 0.9885 - val_accuracy: 0.6043 Epoch 28/50 24/24 [==============================] - 0s 4ms/step - loss: 0.8536 - accuracy: 0.6638 - val_loss: 1.0037 - val_accuracy: 0.5920 Epoch 29/50 24/24 
[==============================] - 0s 5ms/step - loss: 0.8917 - accuracy: 0.6481 - val_loss: 1.0233 - val_accuracy: 0.6135 Epoch 30/50 24/24 [==============================] - 0s 4ms/step - loss: 0.8607 - accuracy: 0.6729 - val_loss: 0.9833 - val_accuracy: 0.6135 Epoch 31/50 24/24 [==============================] - 0s 4ms/step - loss: 0.8429 - accuracy: 0.6687 - val_loss: 0.9962 - val_accuracy: 0.6135 Epoch 32/50 24/24 [==============================] - 0s 4ms/step - loss: 0.8511 - accuracy: 0.6888 - val_loss: 1.0049 - val_accuracy: 0.6012 Epoch 33/50 24/24 [==============================] - 0s 4ms/step - loss: 0.7908 - accuracy: 0.7064 - val_loss: 1.0048 - val_accuracy: 0.6227 Epoch 34/50 24/24 [==============================] - 0s 5ms/step - loss: 0.8390 - accuracy: 0.6838 - val_loss: 1.0292 - val_accuracy: 0.5859 Epoch 35/50 24/24 [==============================] - 0s 5ms/step - loss: 0.8420 - accuracy: 0.6802 - val_loss: 1.0226 - val_accuracy: 0.5982 Epoch 36/50 24/24 [==============================] - 0s 5ms/step - loss: 0.7954 - accuracy: 0.6962 - val_loss: 1.0126 - val_accuracy: 0.5706 Epoch 37/50 24/24 [==============================] - 0s 3ms/step - loss: 0.7725 - accuracy: 0.7038 - val_loss: 1.0173 - val_accuracy: 0.6104 Epoch 38/50 24/24 [==============================] - 0s 3ms/step - loss: 0.7739 - accuracy: 0.7219 - val_loss: 1.0189 - val_accuracy: 0.6135 Epoch 39/50 24/24 [==============================] - 0s 3ms/step - loss: 0.7794 - accuracy: 0.7019 - val_loss: 1.0394 - val_accuracy: 0.5767 Epoch 40/50 24/24 [==============================] - 0s 4ms/step - loss: 0.7806 - accuracy: 0.7152 - val_loss: 1.0268 - val_accuracy: 0.6135 Epoch 41/50 24/24 [==============================] - 0s 4ms/step - loss: 0.7735 - accuracy: 0.6950 - val_loss: 1.1053 - val_accuracy: 0.5982 Epoch 42/50 24/24 [==============================] - 0s 3ms/step - loss: 0.7814 - accuracy: 0.7291 - val_loss: 1.0603 - val_accuracy: 0.6196 Epoch 43/50 24/24 
[==============================] - 0s 4ms/step - loss: 0.7741 - accuracy: 0.7148 - val_loss: 1.0936 - val_accuracy: 0.5982 Epoch 44/50 24/24 [==============================] - 0s 4ms/step - loss: 0.7186 - accuracy: 0.7418 - val_loss: 1.0599 - val_accuracy: 0.6074 Epoch 45/50 24/24 [==============================] - 0s 3ms/step - loss: 0.7228 - accuracy: 0.7258 - val_loss: 1.1831 - val_accuracy: 0.5890 Epoch 46/50 24/24 [==============================] - 0s 4ms/step - loss: 0.7420 - accuracy: 0.7183 - val_loss: 1.1000 - val_accuracy: 0.5583 Epoch 47/50 24/24 [==============================] - 0s 3ms/step - loss: 0.7118 - accuracy: 0.7370 - val_loss: 1.1419 - val_accuracy: 0.5828 Epoch 48/50 24/24 [==============================] - 0s 3ms/step - loss: 0.7218 - accuracy: 0.7414 - val_loss: 1.1625 - val_accuracy: 0.5951 Epoch 49/50 24/24 [==============================] - 0s 3ms/step - loss: 0.6834 - accuracy: 0.7616 - val_loss: 1.1225 - val_accuracy: 0.5706 Epoch 50/50 24/24 [==============================] - 0s 4ms/step - loss: 0.7208 - accuracy: 0.7264 - val_loss: 1.1490 - val_accuracy: 0.5828 Best epoch: 33
print(colored('\x1B[1m Rebuilding and Refitting the Model with best epochs','blue'))
# Rebuild from scratch (weights re-initialised) so the retrain below starts clean.
hypermodel = tuner.hypermodel.build(best_hps)
Rebuilding and Refitting the Model with best epochs
# Retrain for exactly the epoch count that maximised validation accuracy above.
hypermodel.fit(X_train, Y_train, epochs=best_epoch, validation_split=0.3)
Epoch 1/33 24/24 [==============================] - 1s 11ms/step - loss: 2.0598 - accuracy: 0.3983 - val_loss: 1.4583 - val_accuracy: 0.4448 Epoch 2/33 24/24 [==============================] - 0s 5ms/step - loss: 1.3978 - accuracy: 0.4554 - val_loss: 1.2595 - val_accuracy: 0.5460 Epoch 3/33 24/24 [==============================] - 0s 4ms/step - loss: 1.2942 - accuracy: 0.5489 - val_loss: 1.1982 - val_accuracy: 0.5767 Epoch 4/33 24/24 [==============================] - 0s 3ms/step - loss: 1.2165 - accuracy: 0.5792 - val_loss: 1.1697 - val_accuracy: 0.5798 Epoch 5/33 24/24 [==============================] - 0s 4ms/step - loss: 1.1703 - accuracy: 0.5808 - val_loss: 1.1361 - val_accuracy: 0.5767 Epoch 6/33 24/24 [==============================] - 0s 5ms/step - loss: 1.2002 - accuracy: 0.5384 - val_loss: 1.1574 - val_accuracy: 0.5399 Epoch 7/33 24/24 [==============================] - 0s 4ms/step - loss: 1.1209 - accuracy: 0.5683 - val_loss: 1.0994 - val_accuracy: 0.5828 Epoch 8/33 24/24 [==============================] - 0s 3ms/step - loss: 1.1075 - accuracy: 0.5831 - val_loss: 1.0927 - val_accuracy: 0.5644 Epoch 9/33 24/24 [==============================] - 0s 5ms/step - loss: 1.0622 - accuracy: 0.5991 - val_loss: 1.0471 - val_accuracy: 0.5828 Epoch 10/33 24/24 [==============================] - 0s 4ms/step - loss: 1.0354 - accuracy: 0.6464 - val_loss: 1.0293 - val_accuracy: 0.6012 Epoch 11/33 24/24 [==============================] - 0s 3ms/step - loss: 0.9881 - accuracy: 0.6095 - val_loss: 1.0699 - val_accuracy: 0.5368 Epoch 12/33 24/24 [==============================] - 0s 4ms/step - loss: 1.0493 - accuracy: 0.5591 - val_loss: 1.0036 - val_accuracy: 0.6104 Epoch 13/33 24/24 [==============================] - 0s 3ms/step - loss: 0.9993 - accuracy: 0.6266 - val_loss: 1.0143 - val_accuracy: 0.6227 Epoch 14/33 24/24 [==============================] - 0s 3ms/step - loss: 1.0091 - accuracy: 0.6220 - val_loss: 0.9895 - val_accuracy: 0.6227 Epoch 15/33 24/24 
[==============================] - 0s 8ms/step - loss: 0.9595 - accuracy: 0.6386 - val_loss: 1.0006 - val_accuracy: 0.5982 Epoch 16/33 24/24 [==============================] - 0s 3ms/step - loss: 0.9553 - accuracy: 0.6446 - val_loss: 0.9954 - val_accuracy: 0.5828 Epoch 17/33 24/24 [==============================] - 0s 3ms/step - loss: 0.9789 - accuracy: 0.5851 - val_loss: 0.9822 - val_accuracy: 0.6166 Epoch 18/33 24/24 [==============================] - 0s 3ms/step - loss: 0.9311 - accuracy: 0.6251 - val_loss: 1.0319 - val_accuracy: 0.5890 Epoch 19/33 24/24 [==============================] - 0s 4ms/step - loss: 0.9371 - accuracy: 0.6208 - val_loss: 1.0134 - val_accuracy: 0.5859 Epoch 20/33 24/24 [==============================] - 0s 4ms/step - loss: 0.8909 - accuracy: 0.6308 - val_loss: 0.9857 - val_accuracy: 0.6288 Epoch 21/33 24/24 [==============================] - 0s 5ms/step - loss: 0.9240 - accuracy: 0.6482 - val_loss: 0.9796 - val_accuracy: 0.6043 Epoch 22/33 24/24 [==============================] - 0s 5ms/step - loss: 0.9164 - accuracy: 0.6246 - val_loss: 0.9927 - val_accuracy: 0.6319 Epoch 23/33 24/24 [==============================] - 0s 6ms/step - loss: 0.8812 - accuracy: 0.6510 - val_loss: 1.0005 - val_accuracy: 0.6135 Epoch 24/33 24/24 [==============================] - 0s 4ms/step - loss: 0.8726 - accuracy: 0.6539 - val_loss: 1.0060 - val_accuracy: 0.6288 Epoch 25/33 24/24 [==============================] - 0s 5ms/step - loss: 0.7818 - accuracy: 0.7107 - val_loss: 1.0100 - val_accuracy: 0.5798 Epoch 26/33 24/24 [==============================] - 0s 4ms/step - loss: 0.8450 - accuracy: 0.6733 - val_loss: 1.0148 - val_accuracy: 0.6288 Epoch 27/33 24/24 [==============================] - 0s 3ms/step - loss: 0.8593 - accuracy: 0.6583 - val_loss: 1.0107 - val_accuracy: 0.6074 Epoch 28/33 24/24 [==============================] - 0s 3ms/step - loss: 0.8670 - accuracy: 0.6713 - val_loss: 1.0508 - val_accuracy: 0.6166 Epoch 29/33 24/24 
[==============================] - 0s 3ms/step - loss: 0.7715 - accuracy: 0.7042 - val_loss: 1.0417 - val_accuracy: 0.6227 Epoch 30/33 24/24 [==============================] - 0s 3ms/step - loss: 0.8126 - accuracy: 0.6652 - val_loss: 1.0469 - val_accuracy: 0.6104 Epoch 31/33 24/24 [==============================] - 0s 4ms/step - loss: 0.8135 - accuracy: 0.6728 - val_loss: 1.0669 - val_accuracy: 0.5951 Epoch 32/33 24/24 [==============================] - 0s 4ms/step - loss: 0.8296 - accuracy: 0.6750 - val_loss: 1.0398 - val_accuracy: 0.6288 Epoch 33/33 24/24 [==============================] - 0s 4ms/step - loss: 0.7992 - accuracy: 0.6940 - val_loss: 1.0895 - val_accuracy: 0.5798
<tensorflow.python.keras.callbacks.History at 0x147777dc0>
# Section header: tuned-model evaluation on the training split.
print(colored('\x1B[1m Evaluating model with train dataset','blue'))
Evaluating model with train dataset
# Training-split metrics of the tuner-selected model.
train_metrics = hypermodel.evaluate(X_train, Y_train)
loss, accu = train_metrics
print(colored('\x1B[1mTrain Loss:', 'red'), loss)
print(colored('\x1B[1mTrain Accuracy:', 'green'), accu)
34/34 [==============================] - 0s 1ms/step - loss: 0.8563 - accuracy: 0.6728 Test Loss: 0.8563024997711182 Test Accuracy: 0.6728110313415527
# Section header: tuned-model evaluation on the held-out test split.
print(colored('\x1B[1m Evaluating model with test dataset','blue'))
Evaluating model with test dataset
# Held-out metrics of the tuner-selected model.
loss, accu = hypermodel.evaluate(X_test, Y_test)
for tag, value, colour in (('\x1B[1mTest Loss:', loss, 'red'),
                           ('\x1B[1mTest Accuracy:', accu, 'green')):
    print(colored(tag, colour), value)
9/9 [==============================] - 0s 2ms/step - loss: 1.0886 - accuracy: 0.6029 Test Loss: 1.0886085033416748 Test Accuracy: 0.6029411554336548
# Hand-chosen optimiser hyperparameters for the manually built classifier below.
lr = 2e-2  # SGD learning rate
Lambda = 1e-4  # L2 regularisation strength on the output layer
d=1e-6  # learning-rate decay
m=0.9  # SGD momentum
print(colored('\x1B[1m Building the Model','blue'))
# Hand-built classifier: four identical hidden stages of
# Dense(50) -> BatchNorm -> Dropout(0.02) -> ReLU, then a 9-way softmax head.
# The repeated stage is a loop instead of four copy-pasted blocks; the
# resulting layer sequence is identical to the original.
cl_model = Sequential()
cl_model.add(keras.layers.Flatten(input_shape=(11,)))  # 11 input parameters
for _ in range(4):
    cl_model.add(Dense(50))
    cl_model.add(BatchNormalization())
    cl_model.add(Dropout(.02))
    cl_model.add(Activation('relu'))
# L2-regularised softmax output over the 9 classes.
cl_model.add(Dense(9, activation='softmax', kernel_regularizer=regularizers.l2(Lambda)))
Building the Model
# SGD with momentum and time-based decay, using the constants defined above.
opt = tf.keras.optimizers.SGD(learning_rate=lr, momentum=m, decay=d)
print(colored('\x1B[1m Compiling Model','blue'))
# categorical_crossentropy assumes Y_train/Y_test are one-hot encoded — TODO confirm upstream.
cl_model.compile(loss='categorical_crossentropy',optimizer=opt,metrics=['accuracy'])
Compiling Model
# Section header: training the hand-built classifier.
print(colored('\x1B[1m Fitting the Model','blue'))
Fitting the Model
# Train for the tuner-selected best epoch count; `bz` (batch size) is defined
# earlier in the notebook — presumably the chosen batch size, verify there.
history = cl_model.fit(X_train, Y_train, epochs=best_epoch,batch_size=bz, validation_split=0.3)
Epoch 1/33 31/31 [==============================] - 2s 13ms/step - loss: 2.0691 - accuracy: 0.2748 - val_loss: 1.3199 - val_accuracy: 0.5460 Epoch 2/33 31/31 [==============================] - 0s 4ms/step - loss: 1.0606 - accuracy: 0.5293 - val_loss: 1.1835 - val_accuracy: 0.5798 Epoch 3/33 31/31 [==============================] - 0s 4ms/step - loss: 0.9674 - accuracy: 0.5789 - val_loss: 1.0640 - val_accuracy: 0.6227 Epoch 4/33 31/31 [==============================] - 0s 4ms/step - loss: 0.9373 - accuracy: 0.5696 - val_loss: 1.0060 - val_accuracy: 0.6104 Epoch 5/33 31/31 [==============================] - 0s 5ms/step - loss: 0.9038 - accuracy: 0.5980 - val_loss: 0.9823 - val_accuracy: 0.5951 Epoch 6/33 31/31 [==============================] - 0s 4ms/step - loss: 0.9226 - accuracy: 0.5806 - val_loss: 0.9642 - val_accuracy: 0.5798 Epoch 7/33 31/31 [==============================] - 0s 4ms/step - loss: 0.8949 - accuracy: 0.5848 - val_loss: 1.0057 - val_accuracy: 0.5706 Epoch 8/33 31/31 [==============================] - 0s 4ms/step - loss: 0.8568 - accuracy: 0.6056 - val_loss: 0.9876 - val_accuracy: 0.5798 Epoch 9/33 31/31 [==============================] - 0s 4ms/step - loss: 0.8355 - accuracy: 0.6222 - val_loss: 1.0076 - val_accuracy: 0.5706 Epoch 10/33 31/31 [==============================] - 0s 5ms/step - loss: 0.8234 - accuracy: 0.6583 - val_loss: 0.9437 - val_accuracy: 0.6166 Epoch 11/33 31/31 [==============================] - 0s 4ms/step - loss: 0.7815 - accuracy: 0.6597 - val_loss: 1.0015 - val_accuracy: 0.5951 Epoch 12/33 31/31 [==============================] - 0s 5ms/step - loss: 0.7946 - accuracy: 0.6873 - val_loss: 1.0315 - val_accuracy: 0.5552 Epoch 13/33 31/31 [==============================] - 0s 8ms/step - loss: 0.7748 - accuracy: 0.6701 - val_loss: 1.0767 - val_accuracy: 0.5706 Epoch 14/33 31/31 [==============================] - 0s 6ms/step - loss: 0.7667 - accuracy: 0.6675 - val_loss: 1.0634 - val_accuracy: 0.5613 Epoch 15/33 31/31 
[==============================] - 0s 6ms/step - loss: 0.7678 - accuracy: 0.6688 - val_loss: 1.1174 - val_accuracy: 0.5920 Epoch 16/33 31/31 [==============================] - 0s 5ms/step - loss: 0.7724 - accuracy: 0.6692 - val_loss: 1.0642 - val_accuracy: 0.6074 Epoch 17/33 31/31 [==============================] - 0s 5ms/step - loss: 0.7460 - accuracy: 0.6623 - val_loss: 1.1231 - val_accuracy: 0.5368 Epoch 18/33 31/31 [==============================] - 0s 5ms/step - loss: 0.7345 - accuracy: 0.6713 - val_loss: 1.0901 - val_accuracy: 0.5399 Epoch 19/33 31/31 [==============================] - 0s 5ms/step - loss: 0.7670 - accuracy: 0.6426 - val_loss: 1.1221 - val_accuracy: 0.5644 Epoch 20/33 31/31 [==============================] - 0s 4ms/step - loss: 0.7841 - accuracy: 0.6582 - val_loss: 1.0379 - val_accuracy: 0.5859 Epoch 21/33 31/31 [==============================] - 0s 4ms/step - loss: 0.7073 - accuracy: 0.6983 - val_loss: 1.1216 - val_accuracy: 0.5337 Epoch 22/33 31/31 [==============================] - 0s 4ms/step - loss: 0.7279 - accuracy: 0.6955 - val_loss: 1.0339 - val_accuracy: 0.5951 Epoch 23/33 31/31 [==============================] - 0s 4ms/step - loss: 0.7272 - accuracy: 0.6924 - val_loss: 1.1027 - val_accuracy: 0.5429 Epoch 24/33 31/31 [==============================] - 0s 4ms/step - loss: 0.7199 - accuracy: 0.6737 - val_loss: 1.1083 - val_accuracy: 0.5644 Epoch 25/33 31/31 [==============================] - 0s 6ms/step - loss: 0.6853 - accuracy: 0.7135 - val_loss: 1.1051 - val_accuracy: 0.5890 Epoch 26/33 31/31 [==============================] - 0s 5ms/step - loss: 0.6532 - accuracy: 0.6907 - val_loss: 1.1814 - val_accuracy: 0.5613 Epoch 27/33 31/31 [==============================] - 0s 5ms/step - loss: 0.6402 - accuracy: 0.7125 - val_loss: 1.1788 - val_accuracy: 0.5521 Epoch 28/33 31/31 [==============================] - 0s 5ms/step - loss: 0.7584 - accuracy: 0.6737 - val_loss: 1.2264 - val_accuracy: 0.5368 Epoch 29/33 31/31 
[==============================] - 0s 4ms/step - loss: 0.6634 - accuracy: 0.7100 - val_loss: 1.2708 - val_accuracy: 0.5153 Epoch 30/33 31/31 [==============================] - 0s 4ms/step - loss: 0.7346 - accuracy: 0.6927 - val_loss: 1.2404 - val_accuracy: 0.5859 Epoch 31/33 31/31 [==============================] - 0s 4ms/step - loss: 0.6647 - accuracy: 0.6882 - val_loss: 1.2523 - val_accuracy: 0.5798 Epoch 32/33 31/31 [==============================] - 0s 4ms/step - loss: 0.6198 - accuracy: 0.7341 - val_loss: 1.2266 - val_accuracy: 0.5491 Epoch 33/33 31/31 [==============================] - 0s 4ms/step - loss: 0.6336 - accuracy: 0.7114 - val_loss: 1.1975 - val_accuracy: 0.5491
# Section header: hand-built classifier evaluation on the training split.
print(colored('\x1B[1m Evaluating model with train dataset','blue'))
Evaluating model with train dataset
# NOTE(review): this trains for another `best_epoch` epochs (now without a
# validation split) *before* evaluating, so the reported "train" metrics
# reflect extra training, not the model as fitted above — confirm intent.
cl_model.fit(X_train, Y_train, epochs=best_epoch, batch_size=bz, verbose= 1)
loss1,accu1 = cl_model.evaluate(X_train, Y_train, verbose=0)
Epoch 1/33 44/44 [==============================] - 0s 3ms/step - loss: 0.8416 - accuracy: 0.6332 Epoch 2/33 44/44 [==============================] - 0s 2ms/step - loss: 0.7853 - accuracy: 0.6581 Epoch 3/33 44/44 [==============================] - 0s 2ms/step - loss: 0.8111 - accuracy: 0.6516 Epoch 4/33 44/44 [==============================] - 0s 3ms/step - loss: 0.7439 - accuracy: 0.6820 Epoch 5/33 44/44 [==============================] - 0s 2ms/step - loss: 0.7351 - accuracy: 0.6820 Epoch 6/33 44/44 [==============================] - 0s 2ms/step - loss: 0.7393 - accuracy: 0.6673 Epoch 7/33 44/44 [==============================] - 0s 2ms/step - loss: 0.7247 - accuracy: 0.6894 Epoch 8/33 44/44 [==============================] - 0s 2ms/step - loss: 0.7212 - accuracy: 0.6774 Epoch 9/33 44/44 [==============================] - 0s 2ms/step - loss: 0.7168 - accuracy: 0.6885 Epoch 10/33 44/44 [==============================] - 0s 2ms/step - loss: 0.7173 - accuracy: 0.6848 Epoch 11/33 44/44 [==============================] - 0s 2ms/step - loss: 0.6974 - accuracy: 0.6949 Epoch 12/33 44/44 [==============================] - 0s 2ms/step - loss: 0.7035 - accuracy: 0.6912 Epoch 13/33 44/44 [==============================] - 0s 2ms/step - loss: 0.6967 - accuracy: 0.7041 Epoch 14/33 44/44 [==============================] - 0s 2ms/step - loss: 0.6707 - accuracy: 0.6968 Epoch 15/33 44/44 [==============================] - 0s 2ms/step - loss: 0.7022 - accuracy: 0.6903 Epoch 16/33 44/44 [==============================] - 0s 3ms/step - loss: 0.7201 - accuracy: 0.6820 Epoch 17/33 44/44 [==============================] - 0s 2ms/step - loss: 0.6908 - accuracy: 0.6802 Epoch 18/33 44/44 [==============================] - 0s 3ms/step - loss: 0.6524 - accuracy: 0.7217 Epoch 19/33 44/44 [==============================] - 0s 2ms/step - loss: 0.6689 - accuracy: 0.7069 Epoch 20/33 44/44 [==============================] - 0s 3ms/step - loss: 0.7109 - accuracy: 0.6866 Epoch 21/33 44/44 
[==============================] - 0s 3ms/step - loss: 0.6409 - accuracy: 0.7051 Epoch 22/33 44/44 [==============================] - 0s 3ms/step - loss: 0.6958 - accuracy: 0.7263 Epoch 23/33 44/44 [==============================] - 0s 3ms/step - loss: 0.6782 - accuracy: 0.7005 Epoch 24/33 44/44 [==============================] - 0s 3ms/step - loss: 0.6730 - accuracy: 0.7161 Epoch 25/33 44/44 [==============================] - 0s 3ms/step - loss: 0.6538 - accuracy: 0.7198 Epoch 26/33 44/44 [==============================] - 0s 2ms/step - loss: 0.6191 - accuracy: 0.7226 Epoch 27/33 44/44 [==============================] - 0s 2ms/step - loss: 0.6320 - accuracy: 0.7198 Epoch 28/33 44/44 [==============================] - 0s 3ms/step - loss: 0.6055 - accuracy: 0.7318 Epoch 29/33 44/44 [==============================] - 0s 2ms/step - loss: 0.6744 - accuracy: 0.7152 Epoch 30/33 44/44 [==============================] - 0s 2ms/step - loss: 0.6401 - accuracy: 0.7235 Epoch 31/33 44/44 [==============================] - 0s 2ms/step - loss: 0.6233 - accuracy: 0.7392 Epoch 32/33 44/44 [==============================] - 0s 2ms/step - loss: 0.6349 - accuracy: 0.7300 Epoch 33/33 44/44 [==============================] - 0s 2ms/step - loss: 0.6338 - accuracy: 0.7401
# Report the train-set metrics computed above (ANSI bold via \x1B[1m).
print(colored('\x1B[1mTrain Loss:','red'),(loss1))
print(colored('\x1B[1mTrain Accuracy:','green'),(accu1))
Train Loss: 0.41428008675575256 Train Accuracy: 0.8442396521568298
print(colored('\x1B[1m Evaluating model with test dataset','blue'))
Evaluating model with test dataset
# NOTE(review): fitting on the test set before evaluating leaks the test
# data into the model — the "test accuracy" reported below is effectively
# training accuracy on the test split. Evaluate without this fit call.
cl_model.fit(X_test, Y_test, epochs=best_epoch, batch_size=bz, verbose= 1)
loss,accu = cl_model.evaluate(X_test, Y_test, verbose=0)
Epoch 1/33 11/11 [==============================] - 0s 2ms/step - loss: 1.2797 - accuracy: 0.5772 Epoch 2/33 11/11 [==============================] - 0s 2ms/step - loss: 0.9841 - accuracy: 0.6507 Epoch 3/33 11/11 [==============================] - 0s 2ms/step - loss: 0.8416 - accuracy: 0.6765 Epoch 4/33 11/11 [==============================] - 0s 2ms/step - loss: 0.7928 - accuracy: 0.6507 Epoch 5/33 11/11 [==============================] - 0s 3ms/step - loss: 0.7388 - accuracy: 0.6838 Epoch 6/33 11/11 [==============================] - 0s 2ms/step - loss: 0.6791 - accuracy: 0.6875 Epoch 7/33 11/11 [==============================] - 0s 2ms/step - loss: 0.6110 - accuracy: 0.7353 Epoch 8/33 11/11 [==============================] - 0s 2ms/step - loss: 0.6281 - accuracy: 0.7353 Epoch 9/33 11/11 [==============================] - 0s 2ms/step - loss: 0.5747 - accuracy: 0.7757 Epoch 10/33 11/11 [==============================] - 0s 3ms/step - loss: 0.5555 - accuracy: 0.7500 Epoch 11/33 11/11 [==============================] - 0s 3ms/step - loss: 0.5524 - accuracy: 0.7868 Epoch 12/33 11/11 [==============================] - 0s 2ms/step - loss: 0.4649 - accuracy: 0.7978 Epoch 13/33 11/11 [==============================] - 0s 2ms/step - loss: 0.4957 - accuracy: 0.8088 Epoch 14/33 11/11 [==============================] - 0s 3ms/step - loss: 0.4764 - accuracy: 0.8051 Epoch 15/33 11/11 [==============================] - 0s 3ms/step - loss: 0.4657 - accuracy: 0.8309 Epoch 16/33 11/11 [==============================] - 0s 2ms/step - loss: 0.4548 - accuracy: 0.8272 Epoch 17/33 11/11 [==============================] - 0s 2ms/step - loss: 0.4559 - accuracy: 0.8272 Epoch 18/33 11/11 [==============================] - 0s 2ms/step - loss: 0.4732 - accuracy: 0.8051 Epoch 19/33 11/11 [==============================] - 0s 3ms/step - loss: 0.4219 - accuracy: 0.8529 Epoch 20/33 11/11 [==============================] - 0s 2ms/step - loss: 0.3782 - accuracy: 0.8603 Epoch 21/33 11/11 
[==============================] - 0s 2ms/step - loss: 0.3886 - accuracy: 0.8456 Epoch 22/33 11/11 [==============================] - 0s 2ms/step - loss: 0.4197 - accuracy: 0.8015 Epoch 23/33 11/11 [==============================] - 0s 2ms/step - loss: 0.4354 - accuracy: 0.8162 Epoch 24/33 11/11 [==============================] - 0s 2ms/step - loss: 0.4608 - accuracy: 0.8235 Epoch 25/33 11/11 [==============================] - 0s 2ms/step - loss: 0.3833 - accuracy: 0.8493 Epoch 26/33 11/11 [==============================] - 0s 2ms/step - loss: 0.3605 - accuracy: 0.8676 Epoch 27/33 11/11 [==============================] - 0s 2ms/step - loss: 0.3767 - accuracy: 0.8529 Epoch 28/33 11/11 [==============================] - 0s 2ms/step - loss: 0.2982 - accuracy: 0.8897 Epoch 29/33 11/11 [==============================] - 0s 2ms/step - loss: 0.2862 - accuracy: 0.8897 Epoch 30/33 11/11 [==============================] - 0s 2ms/step - loss: 0.3058 - accuracy: 0.9044 Epoch 31/33 11/11 [==============================] - 0s 2ms/step - loss: 0.3617 - accuracy: 0.8713 Epoch 32/33 11/11 [==============================] - 0s 2ms/step - loss: 0.2968 - accuracy: 0.8603 Epoch 33/33 11/11 [==============================] - 0s 2ms/step - loss: 0.3259 - accuracy: 0.8824
# Report test-split metrics (inflated — see the fit-on-test note above the
# evaluation cell) and persist the classifier.
print(colored('\x1B[1mTest Loss:','red'),(loss))
print(colored('\x1B[1mTest Accuracy:','green'),(accu))
Test Loss: 0.12206969410181046 Test Accuracy: 0.970588207244873
print(colored('\x1B[1mSaving Model:','blue'))
# Saves in TensorFlow SavedModel format (a directory named 'NN Classification').
cl_model.save('NN Classification')
Saving Model:
INFO:tensorflow:Assets written to: NN Classification/assets
import tkinter as tk
from tkinter import ttk
import pandas as pd
import numpy as np
import re
# --- Tkinter GUI scaffold: root window and Step-1 (file name) widgets ---
nn = tk.Tk()
nn.title('Neural Networks GUI - Great Learning')
#import dataframe
name=ttk.Label(nn,text='Step 1: File Name')
name.grid(row=0,column=0,sticky=tk.W)
# name_var holds the file name typed by the user; Import_data() reads it.
name_var=tk.StringVar()
name_entrybox=ttk.Entry(nn,width=16,textvariable=name_var)
name_entrybox.grid(row=0,column=1)
def Import_data():
    """Load the dataset named in the Step-1 entry box into the global ``df``.

    Supports ``.xlsx`` and ``.csv`` files. Writes a status string into a
    confirmation box next to the Import button.
    """
    global df
    file_name = name_var.get()
    # Raw string so '\.' is a literal dot, not a string escape.
    file_ext = re.findall(r'\..*', file_name)
    if file_ext == ['.xlsx']:
        df = pd.read_excel(file_name)
        confirm = 'Done'
    elif file_ext == ['.csv']:
        df = pd.read_csv(file_name)
        confirm = 'Done'
    else:
        # Bug fix: the original reported 'Done' unconditionally and left the
        # global df set to the raw filename string on unsupported extensions.
        confirm = 'Unsupported file'
    confirm_entrybox = ttk.Entry(nn, width=16)
    confirm_entrybox.grid(row=0, column=3)
    confirm_entrybox.insert(1, str(confirm))
# Button that triggers the dataset import.
Import_button=ttk.Button(nn,text='Import Data',command=Import_data)
Import_button.grid(row=0,column=2)
#target variable name
# Step-2 widgets: label and entry box for the target-column name.
target=ttk.Label(nn,text='Step 2: Target Column')
target.grid(row=1,column=0,sticky=tk.W)
target_var=tk.StringVar()
target_entrybox=ttk.Entry(nn,width=16,textvariable=target_var)
target_entrybox.grid(row=1,column=1)
from sklearn.preprocessing import StandardScaler
def Target_variable():
    """Split the global ``df`` into scaled train/test sets on the Step-2 target.

    Reads the target column name from the entry box; if the column exists,
    performs a 70/30 train/test split and standard-scales the features.
    Writes 'Found' / 'Not found' into a confirmation box.
    """
    global df, X, Y, target_name, X_train, X_test, Y_train, Y_test
    target_name = target_var.get()
    # Membership test replaces the original index loop (whose dead `found`
    # counter was never used).
    if target_name in df.columns:
        confirm = 'Found'
        Y = df[target_name]
        X = df.drop(target_name, axis=1)
        # split data
        from sklearn.model_selection import train_test_split
        X_train, X_test, Y_train, Y_test = train_test_split(
            X, Y, test_size=0.3, random_state=202)
        scaler = StandardScaler()
        X_train = scaler.fit_transform(X_train)
        # Bug fix: transform (not fit_transform) the test set so it is scaled
        # with the training statistics — refitting leaks test information.
        X_test = scaler.transform(X_test)
    else:
        confirm = 'Not found'
    confirm_entrybox = ttk.Entry(nn, width=16)
    confirm_entrybox.grid(row=1, column=3)
    confirm_entrybox.insert(1, str(confirm))
# Button that triggers the target selection / train-test split.
Import_target_button=tk.Button(nn,text='Import target',command=Target_variable)
Import_target_button.grid(row=1,column=2)
#regressor model
from keras.models import Sequential
from keras.layers import Dense, Activation, Flatten
from keras import optimizers, regularizers
from keras import models
# Step-3 (regressor) widgets.
regressor_label=ttk.Label(nn,text='Step 3: Neural Network Regressor')
regressor_label.grid(row=2,column=0,sticky=tk.W)
#train the model
regressor_name=ttk.Label(nn,text='Regression')
regressor_name.grid(row=3,column=0,sticky=tk.E)
def Regressor():
    """Build and train a feed-forward regression network on the global split.

    Architecture: 256-unit ReLU input layer, 256-unit ReLU hidden layer, a
    single linear output unit. Trained with SGD on MSE for 100 epochs with a
    20% validation split. Stores the trained model in the global ``reg_model``.
    """
    global df, X, Y, reg_model
    reg_model = Sequential()
    # Input layer sized to the feature count of the training data.
    reg_model.add(Dense(256, input_shape=(X.shape[1],), activation='relu'))
    # One hidden layer.
    reg_model.add(Dense(256, activation='relu'))
    # Single linear output for regression (L2 penalty of 0 is a no-op).
    reg_model.add(Dense(1, activation='linear',
                        kernel_regularizer=regularizers.l2(0)))
    optimizer = optimizers.SGD(lr=0.1, momentum=0.8, decay=1e-6)
    reg_model.compile(optimizer=optimizer, loss='mse')
    reg_model.fit(X_train, Y_train, epochs=100, batch_size=64,
                  verbose=1, validation_split=0.2)
    status = "Network trained"
    status_box = ttk.Entry(nn, width=16)
    status_box.grid(row=3, column=2)
    status_box.insert(1, str(status))
# Button that trains the regression network.
Train_button=ttk.Button(nn,text='Train',command=Regressor)
Train_button.grid(row=3,column=1)
#pickle the model
pickle_name=ttk.Label(nn,text='Pickle')
pickle_name.grid(row=4,column=0,sticky=tk.E)
def RPickle():
    """Persist the trained regression model to disk.

    NOTE(review): ``model.save`` on a non-.h5 path writes a TensorFlow
    SavedModel directory, not a pickle, despite the '.pkl' name — confirm
    the intended format with downstream consumers.
    """
    global reg_model
    try:
        reg_model.save('Regression_model.pkl')
        confirm = "Saved model to the disk"
    except NameError:
        # Bug fix: clicking Pickle before Train used to crash with NameError
        # because reg_model does not exist yet.
        confirm = "Train the model first"
    confirm_entrybox = ttk.Entry(nn, width=16)
    confirm_entrybox.grid(row=4, column=2)
    confirm_entrybox.insert(1, str(confirm))
# Button that saves the trained regressor to disk.
Pickle_button=ttk.Button(nn,text='Pickle',command=RPickle)
Pickle_button.grid(row=4,column=1)
#classification model
from keras.utils import np_utils
# Step-3 (classifier) widgets.
classification_label=ttk.Label(nn,text='Step 3: Neural Network Classifier')
classification_label.grid(row=5,column=0,sticky=tk.W)
#train the model
classification_name=ttk.Label(nn,text='Classification')
classification_name.grid(row=6,column=0,sticky=tk.E)
def Classification():
    """Build and train a feed-forward classifier on the global split.

    One-hot encodes the labels, builds a 256-256-softmax network, and trains
    it with SGD for 100 epochs with a 20% validation split. Stores the
    trained model in the global ``model``.
    """
    global df, X, y, model, X_train, X_test, Y_train, Y_test
    # Bug fix: only one-hot encode if the labels are still 1-D, so a second
    # click on Train does not re-encode already-encoded labels.
    if np.ndim(Y_train) == 1:
        Y_train = np_utils.to_categorical(Y_train)
        Y_test = np_utils.to_categorical(Y_test)
    model = Sequential()
    # Input layer sized to the feature count.
    model.add(Dense(256, input_shape=(X.shape[1],), activation='relu'))
    # One hidden layer.
    model.add(Dense(256, activation='relu'))
    # One output unit per class (L2 penalty of 0 is a no-op).
    model.add(Dense(Y_train.shape[1], activation='softmax',
                    kernel_regularizer=regularizers.l2(0)))
    sgd = optimizers.SGD(lr=0.1, momentum=0.8, decay=1e-6)
    # Bug fix: categorical cross-entropy (not MSE) is the correct loss for a
    # softmax multiclass output; metrics must be a list of metric names.
    model.compile(optimizer=sgd, loss='categorical_crossentropy',
                  metrics=['accuracy'])
    model.fit(X_train, Y_train, epochs=100, batch_size=64,
              validation_split=0.2)
    confirm = "Network trained"
    confirm_entrybox = ttk.Entry(nn, width=16)
    confirm_entrybox.grid(row=6, column=2)
    confirm_entrybox.insert(1, str(confirm))
# Button that trains the classification network.
Train_button=ttk.Button(nn,text='Train',command=Classification)
Train_button.grid(row=6,column=1)
#pickle the model
pickle_name=ttk.Label(nn,text='Pickle')
pickle_name.grid(row=7,column=0,sticky=tk.E)
def CPickle():
    """Persist the trained classification model to disk.

    NOTE(review): ``model.save`` on a non-.h5 path writes a TensorFlow
    SavedModel directory, not a pickle, despite the '.pkl' name — confirm
    the intended format with downstream consumers.
    """
    global model
    try:
        model.save('Classification_model.pkl')
        confirm = "Saved model to the disk"
    except NameError:
        # Bug fix: clicking Pickle before Train used to crash with NameError
        # because model does not exist yet.
        confirm = "Train the model first"
    confirm_entrybox = ttk.Entry(nn, width=16)
    confirm_entrybox.grid(row=7, column=2)
    confirm_entrybox.insert(1, str(confirm))
# Button that saves the trained classifier, then start the Tk event loop
# (blocks until the window is closed).
Pickle_button=ttk.Button(nn,text='Pickle',command=CPickle)
Pickle_button.grid(row=7,column=1)
nn.mainloop()